From 29f583e9361b18ac5ae81f7c8d8107b8bb06fb25 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Mon, 29 Aug 2022 18:30:36 -0800 Subject: [PATCH 001/530] Initial commit --- .github/workflows/go.yaml | 60 + .github/workflows/publish.yaml | 23 + .gitignore | 35 + README.md | 260 ++ Taskfile.yml | 59 + backend/.dockerignore | 24 + backend/Dockerfile | 25 + backend/app/api/app.go | 46 + backend/app/api/base/base_ctrl.go | 48 + backend/app/api/base/base_ctrl_test.go | 35 + backend/app/api/docs/docs.go | 558 +++ backend/app/api/docs/swagger.json | 534 +++ backend/app/api/docs/swagger.yaml | 318 ++ backend/app/api/main.go | 116 + backend/app/api/middleware.go | 117 + backend/app/api/routes.go | 82 + backend/app/api/seed.go | 98 + backend/app/api/v1/controller.go | 29 + backend/app/api/v1/controller_test.go | 20 + backend/app/api/v1/main_test.go | 51 + backend/app/api/v1/v1_ctrl_admin.go | 207 ++ backend/app/api/v1/v1_ctrl_admin_test.go | 109 + backend/app/api/v1/v1_ctrl_auth.go | 136 + backend/app/api/v1/v1_ctrl_user.go | 80 + backend/app/cli/app.go | 9 + backend/app/cli/app_users.go | 105 + backend/app/cli/main.go | 82 + backend/app/cli/reader/reader.go | 65 + backend/app/generator/main.go | 72 + backend/config.template.yml | 31 + backend/docker-compose.yml | 10 + backend/ent/authtokens.go | 165 + backend/ent/authtokens/authtokens.go | 67 + backend/ent/authtokens/where.go | 403 +++ backend/ent/authtokens_create.go | 326 ++ backend/ent/authtokens_delete.go | 111 + backend/ent/authtokens_query.go | 1000 ++++++ backend/ent/authtokens_update.go | 472 +++ backend/ent/client.go | 344 ++ backend/ent/config.go | 60 + backend/ent/context.go | 33 + backend/ent/ent.go | 261 ++ backend/ent/enttest/enttest.go | 78 + backend/ent/generate.go | 3 + backend/ent/hook/hook.go | 217 ++ backend/ent/migrate/migrate.go | 71 + backend/ent/migrate/schema.go | 63 + backend/ent/mutation.go | 1091 ++++++ backend/ent/predicate/predicate.go | 13 + 
backend/ent/runtime.go | 50 + backend/ent/runtime/runtime.go | 10 + backend/ent/schema/authtokens.go | 43 + backend/ent/schema/user.go | 38 + backend/ent/tx.go | 213 ++ backend/ent/user.go | 157 + backend/ent/user/user.go | 65 + backend/ent/user/where.go | 528 +++ backend/ent/user_create.go | 363 ++ backend/ent/user_delete.go | 111 + backend/ent/user_query.go | 993 ++++++ backend/ent/user_update.go | 592 ++++ backend/go.mod | 49 + backend/go.sum | 154 + backend/internal/config/conf.go | 81 + backend/internal/config/conf_database.go | 27 + backend/internal/config/conf_database_test.go | 36 + backend/internal/config/conf_logger.go | 6 + backend/internal/config/conf_mailer.go | 15 + backend/internal/config/conf_mailer_test.go | 40 + backend/internal/config/conf_seed.go | 13 + backend/internal/mapper/users_automapper.go | 27 + backend/internal/mocks/chimocker/chimocker.go | 30 + backend/internal/mocks/factories/users.go | 16 + backend/internal/mocks/mock_logger.go | 11 + backend/internal/mocks/mocker_services.go | 10 + backend/internal/mocks/mocks_ent_repo.go | 22 + backend/internal/repo/main_test.go | 38 + backend/internal/repo/repos_all.go | 16 + backend/internal/repo/token_ent.go | 74 + backend/internal/repo/token_ent_test.go | 110 + backend/internal/repo/token_interface.go | 20 + backend/internal/repo/users_ent.go | 141 + backend/internal/repo/users_ent_test.go | 148 + backend/internal/repo/users_interface.go | 27 + backend/internal/services/all.go | 15 + backend/internal/services/contexts.go | 40 + backend/internal/services/contexts_test.go | 39 + backend/internal/services/service_admin.go | 47 + backend/internal/services/service_user.go | 84 + backend/internal/types/about_types.go | 11 + backend/internal/types/token_types.go | 39 + backend/internal/types/users_types.go | 58 + backend/internal/types/users_types_test.go | 76 + backend/pkgs/automapper/README.md | 56 + backend/pkgs/automapper/automapper.go | 92 + backend/pkgs/automapper/conf.go | 11 + 
backend/pkgs/automapper/main.go | 48 + backend/pkgs/automapper/templates.go | 22 + backend/pkgs/faker/random.go | 37 + backend/pkgs/faker/randoms_test.go | 95 + backend/pkgs/hasher/password.go | 13 + backend/pkgs/hasher/password_test.go | 40 + backend/pkgs/hasher/token.go | 30 + backend/pkgs/hasher/token_test.go | 44 + backend/pkgs/logger/struct_logger.go | 121 + backend/pkgs/logger/struct_logger_test.go | 119 + backend/pkgs/mailer/mailer.go | 51 + backend/pkgs/mailer/mailer_test.go | 66 + backend/pkgs/mailer/message.go | 56 + backend/pkgs/mailer/message_test.go | 26 + backend/pkgs/mailer/templates.go | 62 + backend/pkgs/mailer/templates/welcome.html | 444 +++ backend/pkgs/mailer/test-mailer-template.json | 7 + backend/pkgs/server/constants.go | 7 + backend/pkgs/server/request.go | 48 + backend/pkgs/server/request_test.go | 210 ++ backend/pkgs/server/response.go | 61 + backend/pkgs/server/response_error_builder.go | 51 + .../server/response_error_builder_test.go | 107 + backend/pkgs/server/response_test.go | 78 + backend/pkgs/server/result.go | 27 + backend/pkgs/server/server.go | 123 + backend/pkgs/server/server_test.go | 97 + backend/pkgs/server/worker.go | 20 + backend/static/favicon.ico | Bin 0 -> 1366 bytes client/client/index.ts | 5 + client/client/v1client.ts | 93 + client/package-lock.json | 3024 +++++++++++++++++ client/package.json | 17 + client/test/base/base.test.ts | 26 + client/test/config.ts | 4 + client/test/setup.ts | 20 + client/test/v1/login.test.ts | 75 + client/test/vitest.config.ts | 8 + client/tsconfig.json | 17 + 135 files changed, 18463 insertions(+) create mode 100644 .github/workflows/go.yaml create mode 100644 .github/workflows/publish.yaml create mode 100644 .gitignore create mode 100644 README.md create mode 100644 Taskfile.yml create mode 100644 backend/.dockerignore create mode 100644 backend/Dockerfile create mode 100644 backend/app/api/app.go create mode 100644 backend/app/api/base/base_ctrl.go create mode 100644 
backend/app/api/base/base_ctrl_test.go create mode 100644 backend/app/api/docs/docs.go create mode 100644 backend/app/api/docs/swagger.json create mode 100644 backend/app/api/docs/swagger.yaml create mode 100644 backend/app/api/main.go create mode 100644 backend/app/api/middleware.go create mode 100644 backend/app/api/routes.go create mode 100644 backend/app/api/seed.go create mode 100644 backend/app/api/v1/controller.go create mode 100644 backend/app/api/v1/controller_test.go create mode 100644 backend/app/api/v1/main_test.go create mode 100644 backend/app/api/v1/v1_ctrl_admin.go create mode 100644 backend/app/api/v1/v1_ctrl_admin_test.go create mode 100644 backend/app/api/v1/v1_ctrl_auth.go create mode 100644 backend/app/api/v1/v1_ctrl_user.go create mode 100644 backend/app/cli/app.go create mode 100644 backend/app/cli/app_users.go create mode 100644 backend/app/cli/main.go create mode 100644 backend/app/cli/reader/reader.go create mode 100644 backend/app/generator/main.go create mode 100644 backend/config.template.yml create mode 100644 backend/docker-compose.yml create mode 100644 backend/ent/authtokens.go create mode 100644 backend/ent/authtokens/authtokens.go create mode 100644 backend/ent/authtokens/where.go create mode 100644 backend/ent/authtokens_create.go create mode 100644 backend/ent/authtokens_delete.go create mode 100644 backend/ent/authtokens_query.go create mode 100644 backend/ent/authtokens_update.go create mode 100644 backend/ent/client.go create mode 100644 backend/ent/config.go create mode 100644 backend/ent/context.go create mode 100644 backend/ent/ent.go create mode 100644 backend/ent/enttest/enttest.go create mode 100644 backend/ent/generate.go create mode 100644 backend/ent/hook/hook.go create mode 100644 backend/ent/migrate/migrate.go create mode 100644 backend/ent/migrate/schema.go create mode 100644 backend/ent/mutation.go create mode 100644 backend/ent/predicate/predicate.go create mode 100644 backend/ent/runtime.go create mode 100644 
backend/ent/runtime/runtime.go create mode 100644 backend/ent/schema/authtokens.go create mode 100644 backend/ent/schema/user.go create mode 100644 backend/ent/tx.go create mode 100644 backend/ent/user.go create mode 100644 backend/ent/user/user.go create mode 100644 backend/ent/user/where.go create mode 100644 backend/ent/user_create.go create mode 100644 backend/ent/user_delete.go create mode 100644 backend/ent/user_query.go create mode 100644 backend/ent/user_update.go create mode 100644 backend/go.mod create mode 100644 backend/go.sum create mode 100644 backend/internal/config/conf.go create mode 100644 backend/internal/config/conf_database.go create mode 100644 backend/internal/config/conf_database_test.go create mode 100644 backend/internal/config/conf_logger.go create mode 100644 backend/internal/config/conf_mailer.go create mode 100644 backend/internal/config/conf_mailer_test.go create mode 100644 backend/internal/config/conf_seed.go create mode 100644 backend/internal/mapper/users_automapper.go create mode 100644 backend/internal/mocks/chimocker/chimocker.go create mode 100644 backend/internal/mocks/factories/users.go create mode 100644 backend/internal/mocks/mock_logger.go create mode 100644 backend/internal/mocks/mocker_services.go create mode 100644 backend/internal/mocks/mocks_ent_repo.go create mode 100644 backend/internal/repo/main_test.go create mode 100644 backend/internal/repo/repos_all.go create mode 100644 backend/internal/repo/token_ent.go create mode 100644 backend/internal/repo/token_ent_test.go create mode 100644 backend/internal/repo/token_interface.go create mode 100644 backend/internal/repo/users_ent.go create mode 100644 backend/internal/repo/users_ent_test.go create mode 100644 backend/internal/repo/users_interface.go create mode 100644 backend/internal/services/all.go create mode 100644 backend/internal/services/contexts.go create mode 100644 backend/internal/services/contexts_test.go create mode 100644 
backend/internal/services/service_admin.go create mode 100644 backend/internal/services/service_user.go create mode 100644 backend/internal/types/about_types.go create mode 100644 backend/internal/types/token_types.go create mode 100644 backend/internal/types/users_types.go create mode 100644 backend/internal/types/users_types_test.go create mode 100644 backend/pkgs/automapper/README.md create mode 100644 backend/pkgs/automapper/automapper.go create mode 100644 backend/pkgs/automapper/conf.go create mode 100644 backend/pkgs/automapper/main.go create mode 100644 backend/pkgs/automapper/templates.go create mode 100644 backend/pkgs/faker/random.go create mode 100644 backend/pkgs/faker/randoms_test.go create mode 100644 backend/pkgs/hasher/password.go create mode 100644 backend/pkgs/hasher/password_test.go create mode 100644 backend/pkgs/hasher/token.go create mode 100644 backend/pkgs/hasher/token_test.go create mode 100644 backend/pkgs/logger/struct_logger.go create mode 100644 backend/pkgs/logger/struct_logger_test.go create mode 100644 backend/pkgs/mailer/mailer.go create mode 100644 backend/pkgs/mailer/mailer_test.go create mode 100644 backend/pkgs/mailer/message.go create mode 100644 backend/pkgs/mailer/message_test.go create mode 100644 backend/pkgs/mailer/templates.go create mode 100644 backend/pkgs/mailer/templates/welcome.html create mode 100644 backend/pkgs/mailer/test-mailer-template.json create mode 100644 backend/pkgs/server/constants.go create mode 100644 backend/pkgs/server/request.go create mode 100644 backend/pkgs/server/request_test.go create mode 100644 backend/pkgs/server/response.go create mode 100644 backend/pkgs/server/response_error_builder.go create mode 100644 backend/pkgs/server/response_error_builder_test.go create mode 100644 backend/pkgs/server/response_test.go create mode 100644 backend/pkgs/server/result.go create mode 100644 backend/pkgs/server/server.go create mode 100644 backend/pkgs/server/server_test.go create mode 100644 
backend/pkgs/server/worker.go create mode 100644 backend/static/favicon.ico create mode 100644 client/client/index.ts create mode 100644 client/client/v1client.ts create mode 100644 client/package-lock.json create mode 100644 client/package.json create mode 100644 client/test/base/base.test.ts create mode 100644 client/test/config.ts create mode 100644 client/test/setup.ts create mode 100644 client/test/v1/login.test.ts create mode 100644 client/test/vitest.config.ts create mode 100644 client/tsconfig.json diff --git a/.github/workflows/go.yaml b/.github/workflows/go.yaml new file mode 100644 index 0000000..5674ede --- /dev/null +++ b/.github/workflows/go.yaml @@ -0,0 +1,60 @@ +name: Go Build/Test + +on: + push: + branches: [main] + paths: + - "**.go" + - "client/**/*.ts" + pull_request: + branches: [main] + paths: + - "**.go" + - "client/**/*.ts" + +jobs: + go-test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.18 + + - name: Install Task + uses: arduino/setup-task@v1 + + - name: Build API + run: task api:build + + - name: Build CLI + run: task cli:build + - name: Test + run: task api:coverage + + - name: Upload coverage to Codecov + run: cd backend && bash <(curl -s https://codecov.io/bash) + end-to-end: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Install Task + uses: arduino/setup-task@v1 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.18 + + - name: Build API + run: task api:build + + - name: Setup Node + working-directory: ./client + run: npm install + + - name: Test + run: task client:test diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 0000000..d1f25e9 --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,23 @@ +name: Build Docker Image + +on: + push: + branches: [main] + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: 
actions/checkout@v2 + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.18 + - name: login to container registry + run: docker login ghcr.io --username hay-kot --password $CR_PAT + env: + CR_PAT: ${{ secrets.CR_PAT }} + - name: Build Docker Image + run: cd backend && docker build -t ghcr.io/hay-kot/go-web-template:latest . + - name: push to container registry + run: docker push ghcr.io/hay-kot/go-web-template:latest diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5f78192 --- /dev/null +++ b/.gitignore @@ -0,0 +1,35 @@ +# Project Specific +api.log +config.yml +ent.db +.idea +.vscode + +.DS_Store +test-mailer.json +node_modules + + +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +.task/ +backend/.env diff --git a/README.md b/README.md new file mode 100644 index 0000000..9042c20 --- /dev/null +++ b/README.md @@ -0,0 +1,260 @@ +

Go Web Template

+

+ + + + + + +

+ +This Go Web Template is a simple starter template for a Go web application. It includes a web server API, as well as a starter CLI to manage the web server/database inside the container. It should be noted that while while use of the standard library is a high priority, this template does make use of multiple external packages. It does however abide by the standard http handler pattern. + +- [Template Features](#template-features) + - [General](#general) + - [Mailer](#mailer) + - [Admin / Superuser Management](#admin--superuser-management) + - [Admin](#admin) + - [Self Service](#self-service) + - [Logging](#logging) + - [App Router](#app-router) + - [Web Server](#web-server) + - [Database](#database) + - [Application Configuration](#application-configuration) +- [Management CLI](#management-cli) + - [Docker Setup](#docker-setup) +- [Makefile](#makefile) +- [How To Use: Application API](#how-to-use-application-api) + - [Package Structure (Backend)](#package-structure-backend) + - [app](#app) + - [internal](#internal) + - [pkgs](#pkgs) + - [ent](#ent) + - [Configuring The API](#configuring-the-api) +- [How To Use: Application CLI](#how-to-use-application-cli) + - [Manage Users](#manage-users) + - [List Users](#list-users) + - [Create User](#create-user) + - [Delete User](#delete-user) + +## Template Features + +### General + +- [ ] Test Coverage (WIP) +- [ ] End to End Testing Framework + - [x] Build with TS for ready to go frontend client + - [x] Github CI for end to end testing + - [ ] Basic route tests for end to end testing + - [x] User Auth + - [ ] Admin User Services + - [x] Base API Route +- [x] Basic Backend CI/CD Workflow + - [x] Lint + - [x] Test w/ Coverage + - [x] Build CLI and API +- [ ] Frontend Client + - [ ] Autogenerated types + - [ ] All API Routes (w/ Auth) + +### Mailer + +- [ ] Mailer builder for easy email sending +- [x] Starter email templates + - [x] Activate Account + - [ ] Password Reset +- [ ] Bulk Messages + +### Admin / Superuser 
Management + +#### Admin + +- [ ] CRUD Operations for Users + +#### Self Service + +- [ ] User sign-up +- [ ] Require Activation by Email +- [ ] Stateful Token Auth +- [ ] Login/Logout +- [ ] Password Reset by Email + +### Logging + +- [x] Logging +- [x] File Logging + STDOUT +- [x] Request Logging (sugar in development structured in prod) +- [x] Dependency Free +- [x] Basic Structured Logging + +### App Router + +- [x] Built on Chi Router +- [x] Basic Middleware Stack + - [x] Logging/Structured Logging + - [x] RealIP + - [x] RequestID + - [x] Strip Trailing Slash + - [x] Panic Recovery + - [x] Timeout + - [x] User Auth + - [ ] Admin Auth +- [x] Auto log registered routes for easy debugging + +### Web Server + +- [x] Router agnostic +- [x] Background Tasks +- [ ] Limited Worker Pool +- [x] Graceful shutdown + - [x] Finish HTTP requests with timeout + - [x] Finish background tasks (no timeout) +- [x] Response Helpers + - [x] Error response builder + - [x] Utility responses + - [x] Wrapper class for uniform responses + +### Database + +- [x] [Ent for Database](https://entgo.io/) + +### Application Configuration + +- [x] Yaml/CLI/ENV Configuration + +
+ CLI Args + +``` +Usage: api [options] [arguments] + +OPTIONS + --mode/$API_MODE (default: development) + --web-port/$API_WEB_PORT (default: 3000) + --web-host/$API_WEB_HOST (default: 127.0.0.1) + --database-driver/$API_DATABASE_DRIVER (default: sqlite3) + --database-sqlite-url/$API_DATABASE_SQLITE_URL (default: file:ent?mode=memory&cache=shared&_fk=1) + --database-postgres-url/$API_DATABASE_POSTGRES_URL + --log-level/$API_LOG_LEVEL (default: debug) + --log-file/$API_LOG_FILE + --mailer-host/$API_MAILER_HOST + --mailer-port/$API_MAILER_PORT + --mailer-username/$API_MAILER_USERNAME + --mailer-password/$API_MAILER_PASSWORD + --mailer-from/$API_MAILER_FROM + --seed-enabled/$API_SEED_ENABLED (default: false) + --seed-users/$API_SEED_USERS ,[value...] + --help/-h + display this help message +``` + +
+ +
+ YAML Config + +```yaml +# config.yml +--- +mode: development +web: + port: 3915 + host: 127.0.0.1 +database: + driver: sqlite3 + sqlite-url: ./ent.db?_fk=1 +logger: + level: debug + file: api.log +mailer: + host: smtp.example.com + port: 465 + username: + password: + from: example@email.com +``` + +
+ +## Management CLI + +- [ ] CLI Interface (Partial) + +### Docker Setup + +- [x] Build and Run API +- [x] Build and Setup CLI in path + +## Makefile + +- **Build and Run API:** `make api` +- **Build Production Image** `make prod` +- **Build CLI** `make cli` +- **Test** `make test` +- **Coverage** `make coverage` + +## How To Use: Application API + +### Package Structure (Backend) + +#### app + +The App folder contains the main modules packages/applications that utilize the other packages. These are the applications that are compiled and shipped with the docker-image. + +#### internal + +Internal packages are used to provide the core functionality of the application that need to be shared across Applications _but_ are still tightly coupled to other packages or applications. These can often be bridges from the pkgs folder to the app folder to provide a common interface. + +#### pkgs + +The packages directory contains packages that are considered drop-in and are not tightly coupled to the application. These packages should provide a simple and easily describable feature. For example. The `hasher` package provides a Password Hashing function and checker and can easily be used in this application or any other. + +A good rule to follow is, if you can copy the code from one package to a completely. different project with no-modifications, it belongs here. + +#### ent + +As an exception to the above, this project adhears to the convention set by `Ent` we use a `ent` folder to contain the database schema. If you'd like to replace the Ent package with an alternative, you can review the repository layer in the `internal` folder. + +[Checkout the Entgo.io Getting Started Page](https://entgo.io/docs/getting-started) + +### Configuring The API + +See the [Application Configuration](#application-configuration) section for more information. 
+ +## How To Use: Application CLI + +### Manage Users + +#### List Users + +```bash +go run ./app/cli/*.go users list +``` + +#### Create User + +**Development** + +```bash +go run ./app/cli/*.go users add --name=hay-kot --password=password --email=hay-kot@pm.me --is-super +``` + +**Docker** + +```bash +manage users add --name=hay-kot --password=password --email=hay-kot@pm.me +``` + +#### Delete User + +**Development** + +```bash +go run ./app/cli/*.go users delete --id=2 +``` + +**Docker** + +```bash +manage users delete --id=2 +``` diff --git a/Taskfile.yml b/Taskfile.yml new file mode 100644 index 0000000..ce64fff --- /dev/null +++ b/Taskfile.yml @@ -0,0 +1,59 @@ +version: "3" + +tasks: + cli: + cmds: + - cd backend && go run ./app/cli/ {{.CLI_ARGS}} + silent: false + + cli:build: + cmds: + - cd backend && go build ./app/cli/ + silent: false + + api: + cmds: + - cd backend/app/api/ && swag fmt + - cd backend/app/api/ && swag init --dir=./,../../internal,../../pkgs + # - | + # npx swagger-typescript-api \ + # --path ./backend/app/api/docs/swagger.json \ + # --output ./client/auto-client \ + # --module-name-first-tag \ + # --modular + - cd backend && go run ./app/api/ {{.CLI_ARGS}} + silent: false + sources: + - ./backend/**/*.go + + api:build: + cmds: + - cd backend && go build ./app/api/ + silent: true + + api:test: + cmds: + - cd backend && go test ./app/api/ + silent: true + + api:coverage: + cmds: + - cd backend && go test -race -coverprofile=coverage.out -covermode=atomic ./app/... ./internal/... ./pkgs/... 
-v -cover + silent: true + + client:test: + cmds: + - cd backend && go run ./app/api/ & + - sleep 5 + - cd client && npm run test:ci + silent: true + + docker:build: + cmds: + - cd backend && docker-compose up --build + silent: true + + generate:types: + cmds: + - cd backend && go run ./app/generator + silent: true diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000..720e7a0 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,24 @@ +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/bin +**/charts +**/docker-compose* +**/compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +README.md diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..3602d98 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,25 @@ +# Build API +FROM golang:alpine AS builder +RUN apk add --no-cache git build-base +WORKDIR /go/src/app +COPY . . +RUN go get -d -v ./... 
+RUN go build -o /go/bin/api -v ./app/api/*.go +RUN go build -o /go/bin/manage -v ./app/cli/*.go + + +# Production Stage +FROM alpine:latest + +RUN apk --no-cache add ca-certificates +COPY ./config.template.yml /app/config.yml +COPY --from=builder /go/bin/api /app +COPY --from=builder /go/bin/manage /bin + +RUN chmod +x /app/api +RUN chmod +x /bin/manage + +LABEL Name=gowebtemplate Version=0.0.1 +EXPOSE 7745 +WORKDIR /app +CMD [ "./api" ] diff --git a/backend/app/api/app.go b/backend/app/api/app.go new file mode 100644 index 0000000..5a062b0 --- /dev/null +++ b/backend/app/api/app.go @@ -0,0 +1,46 @@ +package main + +import ( + "time" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/mailer" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +type app struct { + conf *config.Config + logger *logger.Logger + mailer mailer.Mailer + db *ent.Client + server *server.Server + repos *repo.AllRepos + services *services.AllServices +} + +func NewApp(conf *config.Config) *app { + s := &app{ + conf: conf, + } + + s.mailer = mailer.Mailer{ + Host: s.conf.Mailer.Host, + Port: s.conf.Mailer.Port, + Username: s.conf.Mailer.Username, + Password: s.conf.Mailer.Password, + From: s.conf.Mailer.From, + } + + return s +} + +func (a *app) StartReoccurringTasks(t time.Duration, fn func()) { + for { + a.server.Background(fn) + time.Sleep(t) + } +} diff --git a/backend/app/api/base/base_ctrl.go b/backend/app/api/base/base_ctrl.go new file mode 100644 index 0000000..7649b8e --- /dev/null +++ b/backend/app/api/base/base_ctrl.go @@ -0,0 +1,48 @@ +package base + +import ( + "net/http" + + "github.com/hay-kot/git-web-template/backend/internal/types" + 
"github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +type ReadyFunc func() bool + +type BaseController struct { + log *logger.Logger + svr *server.Server +} + +func NewBaseController(log *logger.Logger, svr *server.Server) *BaseController { + h := &BaseController{ + log: log, + svr: svr, + } + return h +} + +// HandleBase godoc +// @Summary Retrieves the basic information about the API +// @Tags Base +// @Produce json +// @Success 200 {object} server.Result{item=types.ApiSummary} +// @Router /status [GET] +func (ctrl *BaseController) HandleBase(ready ReadyFunc, versions ...string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + data := types.ApiSummary{ + Healthy: ready(), + Versions: versions, + Title: "Go API Template", + Message: "Welcome to the Go API Template Application!", + } + + err := server.Respond(w, http.StatusOK, server.Wrap(data)) + + if err != nil { + ctrl.log.Error(err, nil) + server.RespondInternalServerError(w) + } + } +} diff --git a/backend/app/api/base/base_ctrl_test.go b/backend/app/api/base/base_ctrl_test.go new file mode 100644 index 0000000..972f55e --- /dev/null +++ b/backend/app/api/base/base_ctrl_test.go @@ -0,0 +1,35 @@ +package base + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/mocks" +) + +func GetTestHandler(t *testing.T) *BaseController { + return NewBaseController(mocks.GetStructLogger(), nil) +} + +func TestHandlersv1_HandleBase(t *testing.T) { + // Setup + hdlrFunc := GetTestHandler(t).HandleBase(func() bool { return true }, "v1") + + // Call Handler Func + rr := httptest.NewRecorder() + hdlrFunc(rr, nil) + + // Validate Status Code + if rr.Code != http.StatusOK { + t.Errorf("Expected status code to be %d, got %d", http.StatusOK, rr.Code) + } + + // Validate Json Payload + expected := `{"item":{"health":true,"versions":["v1"],"title":"Go API 
Template","message":"Welcome to the Go API Template Application!"}}` + + if rr.Body.String() != expected { + t.Errorf("Expected json to be %s, got %s", expected, rr.Body.String()) + } + +} diff --git a/backend/app/api/docs/docs.go b/backend/app/api/docs/docs.go new file mode 100644 index 0000000..9a371a6 --- /dev/null +++ b/backend/app/api/docs/docs.go @@ -0,0 +1,558 @@ +// Package docs GENERATED BY SWAG; DO NOT EDIT +// This file was generated by swaggo/swag +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "contact": { + "name": "Don't" + }, + "license": { + "name": "MIT" + }, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/status": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Base" + ], + "summary": "Retrieves the basic information about the API", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.ApiSummary" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Gets all users from the database", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "type": "array", + "items": { + "$ref": "#/definitions/types.UserOut" + } + } + } + } + ] + } + } + } + }, + "post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Create a new user", + "parameters": [ + { + "description": "User Data", + "name": "payload", 
+ "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserCreate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users/{id}": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Get a user from the database", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Update a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "delete": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Delete a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + } 
+ } + } + }, + "/v1/users/login": { + "post": { + "consumes": [ + "application/x-www-form-urlencoded", + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Login", + "parameters": [ + { + "type": "string", + "example": "admin@admin.com", + "description": "string", + "name": "username", + "in": "formData" + }, + { + "type": "string", + "example": "admin", + "description": "string", + "name": "password", + "in": "formData" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.TokenResponse" + } + } + } + } + }, + "/v1/users/logout": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "tags": [ + "Authentication" + ], + "summary": "User Logout", + "responses": { + "204": { + "description": "" + } + } + } + }, + "/v1/users/refresh": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "handleAuthRefresh returns a handler that will issue a new token from an existing token.\nThis does not validate that the user still exists within the database.", + "tags": [ + "Authentication" + ], + "summary": "User Token Refresh", + "responses": { + "200": { + "description": "" + } + } + } + }, + "/v1/users/self": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Get the current user", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": 
"#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserUpdate" + } + } + } + ] + } + } + } + } + }, + "/v1/users/self/password": { + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user's password // TODO:", + "responses": { + "204": { + "description": "" + } + } + } + } + }, + "definitions": { + "server.Result": { + "type": "object", + "properties": { + "details": {}, + "error": { + "type": "boolean" + }, + "item": {}, + "message": { + "type": "string" + } + } + }, + "types.ApiSummary": { + "type": "object", + "properties": { + "health": { + "type": "boolean" + }, + "message": { + "type": "string" + }, + "title": { + "type": "string" + }, + "versions": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "types.TokenResponse": { + "type": "object", + "properties": { + "expiresAt": { + "type": "string" + }, + "token": { + "type": "string" + } + } + }, + "types.UserCreate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "types.UserOut": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + } + } + }, + "types.UserUpdate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "name": { + "type": "string" + } + } + } + }, + "securityDefinitions": { + "Bearer": { + "description": "\"Type 'Bearer TOKEN' to correctly set the API Key\"", + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +}` + +// SwaggerInfo 
holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "1.0", + Host: "", + BasePath: "/api", + Schemes: []string{}, + Title: "Go API Templates", + Description: "This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/backend/app/api/docs/swagger.json b/backend/app/api/docs/swagger.json new file mode 100644 index 0000000..51932ec --- /dev/null +++ b/backend/app/api/docs/swagger.json @@ -0,0 +1,534 @@ +{ + "swagger": "2.0", + "info": { + "description": "This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.", + "title": "Go API Templates", + "contact": { + "name": "Don't" + }, + "license": { + "name": "MIT" + }, + "version": "1.0" + }, + "basePath": "/api", + "paths": { + "/status": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Base" + ], + "summary": "Retrieves the basic information about the API", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.ApiSummary" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Gets all users from the database", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "type": "array", + "items": { + "$ref": "#/definitions/types.UserOut" + } + } + } + } + ] + } + } + } + }, + 
"post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Create a new user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserCreate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users/{id}": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Get a user from the database", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Update a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "delete": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + 
"tags": [ + "Admin: Users" + ], + "summary": "Delete a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + } + } + } + }, + "/v1/users/login": { + "post": { + "consumes": [ + "application/x-www-form-urlencoded", + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Login", + "parameters": [ + { + "type": "string", + "example": "admin@admin.com", + "description": "string", + "name": "username", + "in": "formData" + }, + { + "type": "string", + "example": "admin", + "description": "string", + "name": "password", + "in": "formData" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.TokenResponse" + } + } + } + } + }, + "/v1/users/logout": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "tags": [ + "Authentication" + ], + "summary": "User Logout", + "responses": { + "204": { + "description": "" + } + } + } + }, + "/v1/users/refresh": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "handleAuthRefresh returns a handler that will issue a new token from an existing token.\nThis does not validate that the user still exists within the database.", + "tags": [ + "Authentication" + ], + "summary": "User Token Refresh", + "responses": { + "200": { + "description": "" + } + } + } + }, + "/v1/users/self": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Get the current user", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + 
"application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserUpdate" + } + } + } + ] + } + } + } + } + }, + "/v1/users/self/password": { + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user's password // TODO:", + "responses": { + "204": { + "description": "" + } + } + } + } + }, + "definitions": { + "server.Result": { + "type": "object", + "properties": { + "details": {}, + "error": { + "type": "boolean" + }, + "item": {}, + "message": { + "type": "string" + } + } + }, + "types.ApiSummary": { + "type": "object", + "properties": { + "health": { + "type": "boolean" + }, + "message": { + "type": "string" + }, + "title": { + "type": "string" + }, + "versions": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "types.TokenResponse": { + "type": "object", + "properties": { + "expiresAt": { + "type": "string" + }, + "token": { + "type": "string" + } + } + }, + "types.UserCreate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "types.UserOut": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + } + } + }, + "types.UserUpdate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "name": { + "type": "string" + } + 
} + } + }, + "securityDefinitions": { + "Bearer": { + "description": "\"Type 'Bearer TOKEN' to correctly set the API Key\"", + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +} \ No newline at end of file diff --git a/backend/app/api/docs/swagger.yaml b/backend/app/api/docs/swagger.yaml new file mode 100644 index 0000000..b268aa0 --- /dev/null +++ b/backend/app/api/docs/swagger.yaml @@ -0,0 +1,318 @@ +basePath: /api +definitions: + server.Result: + properties: + details: {} + error: + type: boolean + item: {} + message: + type: string + type: object + types.ApiSummary: + properties: + health: + type: boolean + message: + type: string + title: + type: string + versions: + items: + type: string + type: array + type: object + types.TokenResponse: + properties: + expiresAt: + type: string + token: + type: string + type: object + types.UserCreate: + properties: + email: + type: string + isSuperuser: + type: boolean + name: + type: string + password: + type: string + type: object + types.UserOut: + properties: + email: + type: string + id: + type: string + isSuperuser: + type: boolean + name: + type: string + type: object + types.UserUpdate: + properties: + email: + type: string + name: + type: string + type: object +info: + contact: + name: Don't + description: This is a simple Rest API Server Template that implements some basic + User and Authentication patterns to help you get started and bootstrap your next + project!. 
+ license: + name: MIT + title: Go API Templates + version: "1.0" +paths: + /status: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.ApiSummary' + type: object + summary: Retrieves the basic information about the API + tags: + - Base + /v1/admin/users: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + items: + $ref: '#/definitions/types.UserOut' + type: array + type: object + security: + - Bearer: [] + summary: Gets all users from the database + tags: + - 'Admin: Users' + post: + parameters: + - description: User Data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/types.UserCreate' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Create a new user + tags: + - 'Admin: Users' + /v1/admin/users/{id}: + delete: + parameters: + - description: User ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "204": + description: "" + security: + - Bearer: [] + summary: Delete a User + tags: + - 'Admin: Users' + get: + parameters: + - description: User ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Get a user from the database + tags: + - 'Admin: Users' + put: + parameters: + - description: User ID + in: path + name: id + required: true + type: string + - description: User Data + in: body + name: 
payload + required: true + schema: + $ref: '#/definitions/types.UserUpdate' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Update a User + tags: + - 'Admin: Users' + /v1/users/login: + post: + consumes: + - application/x-www-form-urlencoded + - application/json + parameters: + - description: string + example: admin@admin.com + in: formData + name: username + type: string + - description: string + example: admin + in: formData + name: password + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/types.TokenResponse' + summary: User Login + tags: + - Authentication + /v1/users/logout: + post: + responses: + "204": + description: "" + security: + - Bearer: [] + summary: User Logout + tags: + - Authentication + /v1/users/refresh: + get: + description: |- + handleAuthRefresh returns a handler that will issue a new token from an existing token. + This does not validate that the user still exists within the database. 
+ responses: + "200": + description: "" + security: + - Bearer: [] + summary: User Token Refresh + tags: + - Authentication + /v1/users/self: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Get the current user + tags: + - User + put: + parameters: + - description: User Data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/types.UserUpdate' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserUpdate' + type: object + security: + - Bearer: [] + summary: Update the current user + tags: + - User + /v1/users/self/password: + put: + produces: + - application/json + responses: + "204": + description: "" + security: + - Bearer: [] + summary: 'Update the current user''s password // TODO:' + tags: + - User +securityDefinitions: + Bearer: + description: '"Type ''Bearer TOKEN'' to correctly set the API Key"' + in: header + name: Authorization + type: apiKey +swagger: "2.0" diff --git a/backend/app/api/main.go b/backend/app/api/main.go new file mode 100644 index 0000000..12570bd --- /dev/null +++ b/backend/app/api/main.go @@ -0,0 +1,116 @@ +package main + +import ( + "context" + "io" + "log" + "os" + "time" + + "github.com/hay-kot/git-web-template/backend/app/api/docs" + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" + _ "github.com/mattn/go-sqlite3" +) + +// @title Go API Templates +// @version 1.0 +// 
@description This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!. +// @contact.name Don't +// @license.name MIT +// @BasePath /api +// @securityDefinitions.apikey Bearer +// @in header +// @name Authorization +// @description "Type 'Bearer TOKEN' to correctly set the API Key" +func main() { + cfgFile := "config.yml" + + cfg, err := config.NewConfig(cfgFile) + if err != nil { + panic(err) + } + + docs.SwaggerInfo.Host = cfg.Swagger.Host + + if err := run(cfg); err != nil { + panic(err) + } +} + +func run(cfg *config.Config) error { + app := NewApp(cfg) + + // ========================================================================= + // Setup Logger + + var wrt io.Writer + wrt = os.Stdout + if app.conf.Log.File != "" { + f, err := os.OpenFile(app.conf.Log.File, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666) + if err != nil { + log.Fatalf("error opening file: %v", err) + } + defer func(f *os.File) { + _ = f.Close() + }(f) + wrt = io.MultiWriter(wrt, f) + } + + app.logger = logger.New(wrt, logger.LevelDebug) + + // ========================================================================= + // Initialize Database & Repos + + c, err := ent.Open(cfg.Database.GetDriver(), cfg.Database.GetUrl()) + if err != nil { + app.logger.Fatal(err, logger.Props{ + "details": "failed to connect to database", + "database": cfg.Database.GetDriver(), + "url": cfg.Database.GetUrl(), + }) + } + defer func(c *ent.Client) { + _ = c.Close() + }(c) + if err := c.Schema.Create(context.Background()); err != nil { + app.logger.Fatal(err, logger.Props{ + "details": "failed to create schema", + }) + } + + app.db = c + app.repos = repo.EntAllRepos(c) + app.services = services.NewServices(app.repos) + + // ========================================================================= + // Start Server + + app.conf.Print() + + app.server = server.NewServer(app.conf.Web.Host, app.conf.Web.Port) + + routes := 
app.newRouter(app.repos) + app.LogRoutes(routes) + + app.EnsureAdministrator() + app.SeedDatabase(app.repos) + + app.logger.Info("Starting HTTP Server", logger.Props{ + "host": app.server.Host, + "port": app.server.Port, + }) + + // ========================================================================= + // Start Reoccurring Tasks + + go app.StartReoccurringTasks(time.Duration(24)*time.Hour, func() { + app.repos.AuthTokens.PurgeExpiredTokens(context.Background()) + }) + + return app.server.Start(routes) +} diff --git a/backend/app/api/middleware.go b/backend/app/api/middleware.go new file mode 100644 index 0000000..1a0d813 --- /dev/null +++ b/backend/app/api/middleware.go @@ -0,0 +1,117 @@ +package main + +import ( + "fmt" + "net/http" + "strings" + "time" + + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +func (a *app) setGlobalMiddleware(r *chi.Mux) { + // ========================================================================= + // Middleware + r.Use(middleware.RequestID) + r.Use(middleware.RealIP) + r.Use(mwStripTrailingSlash) + + // Use struct logger in production for requests, but use + // pretty console logger in development. + if a.conf.Mode == config.ModeDevelopment { + r.Use(middleware.Logger) + } else { + r.Use(a.mwStructLogger) + } + r.Use(middleware.Recoverer) + + // Set a timeout value on the request context (ctx), that will signal + // through ctx.Done() that the request has timed out and further + // processing should be stopped. 
+ r.Use(middleware.Timeout(60 * time.Second)) +} + +// mwAuthToken is a middleware that will check the database for a stateful token +// and attach it to the request context with the user, or return a 401 if it doesn't exist. +func (a *app) mwAuthToken(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + requestToken := r.Header.Get("Authorization") + + if requestToken == "" { + server.RespondUnauthorized(w) + return + } + + requestToken = strings.TrimPrefix(requestToken, "Bearer ") + + hash := hasher.HashToken(requestToken) + + // Check the database for the token + usr, err := a.repos.AuthTokens.GetUserFromToken(r.Context(), hash) + + if err != nil { + a.logger.Error(err, logger.Props{ + "token": requestToken, + "hash": fmt.Sprintf("%x", hash), + }) + server.RespondUnauthorized(w) + return + } + + r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken)) + + next.ServeHTTP(w, r) + }) +} + +// mwAdminOnly is a middleware that extends the mwAuthToken middleware to only allow +// requests from superusers. +func (a *app) mwAdminOnly(next http.Handler) http.Handler { + + mw := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + usr := services.UseUserCtx(r.Context()) + + if !usr.IsSuperuser { + server.RespondUnauthorized(w) + return + } + + next.ServeHTTP(w, r) + }) + + return a.mwAuthToken(mw) +} + +// mqStripTrailingSlash is a middleware that will strip trailing slashes from the request path. 
+func mwStripTrailingSlash(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + r.URL.Path = strings.TrimSuffix(r.URL.Path, "/") + next.ServeHTTP(w, r) + }) +} + +func (a *app) mwStructLogger(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + scheme := "http" + if r.TLS != nil { + scheme = "https" + } + + url := fmt.Sprintf("%s://%s%s %s", scheme, r.Host, r.RequestURI, r.Proto) + + a.logger.Info(fmt.Sprintf("[%s] %s", r.Method, url), logger.Props{ + "id": middleware.GetReqID(r.Context()), + "method": r.Method, + "url": url, + "remote": r.RemoteAddr, + }) + + next.ServeHTTP(w, r) + }) +} diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go new file mode 100644 index 0000000..e8887a5 --- /dev/null +++ b/backend/app/api/routes.go @@ -0,0 +1,82 @@ +package main + +import ( + "fmt" + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/hay-kot/git-web-template/backend/app/api/base" + _ "github.com/hay-kot/git-web-template/backend/app/api/docs" + v1 "github.com/hay-kot/git-web-template/backend/app/api/v1" + "github.com/hay-kot/git-web-template/backend/internal/repo" + httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware +) + +const prefix = "/api" + +// registerRoutes registers all the routes for the API +func (a *app) newRouter(repos *repo.AllRepos) *chi.Mux { + r := chi.NewRouter() + a.setGlobalMiddleware(r) + + // ========================================================================= + // Base Routes + + r.Get("/swagger/*", httpSwagger.Handler( + httpSwagger.URL(fmt.Sprintf("%s://%s/swagger/doc.json", a.conf.Swagger.Scheme, a.conf.Swagger.Host)), + )) + + // Server Favicon + r.Get("/favicon.ico", func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, "static/favicon.ico") + }) + + baseHandler := base.NewBaseController(a.logger, a.server) + r.Get(prefix+"/status", baseHandler.HandleBase(func() bool { 
return true }, "v1")) + + // ========================================================================= + // API Version 1 + v1Base := v1.BaseUrlFunc(prefix) + v1Handlers := v1.NewControllerV1(a.logger, a.services) + r.Post(v1Base("/users/login"), v1Handlers.HandleAuthLogin()) + r.Group(func(r chi.Router) { + r.Use(a.mwAuthToken) + r.Get(v1Base("/users/self"), v1Handlers.HandleUserSelf()) + r.Put(v1Base("/users/self"), v1Handlers.HandleUserUpdate()) + r.Put(v1Base("/users/self/password"), v1Handlers.HandleUserUpdatePassword()) + r.Post(v1Base("/users/logout"), v1Handlers.HandleAuthLogout()) + r.Get(v1Base("/users/refresh"), v1Handlers.HandleAuthRefresh()) + }) + + r.Group(func(r chi.Router) { + r.Use(a.mwAdminOnly) + r.Get(v1Base("/admin/users"), v1Handlers.HandleAdminUserGetAll()) + r.Post(v1Base("/admin/users"), v1Handlers.HandleAdminUserCreate()) + r.Get(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserGet()) + r.Put(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserUpdate()) + r.Delete(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserDelete()) + }) + + return r +} + +// LogRoutes logs the routes of the server that are registered within Server.registerRoutes(). This is useful for debugging. +// See https://github.com/go-chi/chi/issues/332 for details and inspiration. 
+func (a *app) LogRoutes(r *chi.Mux) { + desiredSpaces := 10 + + walkFunc := func(method string, route string, handler http.Handler, middlewares ...func(http.Handler) http.Handler) error { + text := "[" + method + "]" + + for len(text) < desiredSpaces { + text = text + " " + } + + fmt.Printf("Registered Route: %s%s\n", text, route) + return nil + } + + if err := chi.Walk(r, walkFunc); err != nil { + fmt.Printf("Logging err: %s\n", err.Error()) + } +} diff --git a/backend/app/api/seed.go b/backend/app/api/seed.go new file mode 100644 index 0000000..3fbea74 --- /dev/null +++ b/backend/app/api/seed.go @@ -0,0 +1,98 @@ +package main + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" +) + +const ( + DefaultName = "Admin" + DefaultEmail = "admin@admin.com" + DefaultPassword = "admin" +) + +// EnsureAdministrator ensures that there is at least one superuser in the database +// if one isn't found a default is generate using the default credentials +func (a *app) EnsureAdministrator() { + superusers, err := a.repos.Users.GetSuperusers(context.Background()) + + if err != nil { + a.logger.Error(err, nil) + } + + if len(superusers) > 0 { + return + } + + pw, _ := hasher.HashPassword(DefaultPassword) + + newSuperUser := types.UserCreate{ + Name: DefaultName, + Email: DefaultEmail, + IsSuperuser: true, + Password: pw, + } + + a.logger.Info("creating default superuser", logger.Props{ + "name": newSuperUser.Name, + "email": newSuperUser.Email, + }) + + _, err = a.repos.Users.Create(context.Background(), newSuperUser) + + if err != nil { + a.logger.Fatal(err, nil) + } + +} + +func (a *app) SeedDatabase(repos *repo.AllRepos) { + if !a.conf.Seed.Enabled { + return + } + + for _, user := range a.conf.Seed.Users { + + // Check if User Exists + 
usr, _ := repos.Users.GetOneEmail(context.Background(), user.Email) + + if usr.ID != uuid.Nil { + a.logger.Info("seed user already exists", logger.Props{ + "user": user.Name, + }) + continue + } + + hashedPw, err := hasher.HashPassword(user.Password) + + if err != nil { + a.logger.Error(err, logger.Props{ + "details": "failed to hash password", + "user": user.Name, + }) + } + + _, err = repos.Users.Create(context.Background(), types.UserCreate{ + Name: user.Name, + Email: user.Email, + IsSuperuser: user.IsSuperuser, + Password: hashedPw, + }) + + if err != nil { + a.logger.Error(err, logger.Props{ + "details": "failed to create seed user", + "name": user.Name, + }) + } + + a.logger.Info("creating seed user", logger.Props{ + "name": user.Name, + }) + } +} diff --git a/backend/app/api/v1/controller.go b/backend/app/api/v1/controller.go new file mode 100644 index 0000000..2d13045 --- /dev/null +++ b/backend/app/api/v1/controller.go @@ -0,0 +1,29 @@ +package v1 + +import ( + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" +) + +type V1Controller struct { + log *logger.Logger + svc *services.AllServices +} + +func BaseUrlFunc(prefix string) func(s string) string { + v1Base := prefix + "/v1" + prefixFunc := func(s string) string { + return v1Base + s + } + + return prefixFunc +} + +func NewControllerV1(log *logger.Logger, svc *services.AllServices) *V1Controller { + ctrl := &V1Controller{ + log: log, + svc: svc, + } + + return ctrl +} diff --git a/backend/app/api/v1/controller_test.go b/backend/app/api/v1/controller_test.go new file mode 100644 index 0000000..685d8c6 --- /dev/null +++ b/backend/app/api/v1/controller_test.go @@ -0,0 +1,20 @@ +package v1 + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_NewHandlerV1(t *testing.T) { + + v1Base := BaseUrlFunc("/testing/v1") + ctrl := NewControllerV1(mockHandler.log, mockHandler.svc) + + assert.NotNil(t, ctrl) + + 
assert.Equal(t, ctrl.log, mockHandler.log) + + assert.Equal(t, "/testing/v1/v1/abc123", v1Base("/abc123")) + assert.Equal(t, "/testing/v1/v1/abc123", v1Base("/abc123")) +} diff --git a/backend/app/api/v1/main_test.go b/backend/app/api/v1/main_test.go new file mode 100644 index 0000000..c9a1276 --- /dev/null +++ b/backend/app/api/v1/main_test.go @@ -0,0 +1,51 @@ +package v1 + +import ( + "context" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/mocks" + "github.com/hay-kot/git-web-template/backend/internal/mocks/factories" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +var mockHandler = &V1Controller{} +var users = []types.UserOut{} + +func userPool() func() { + create := []types.UserCreate{ + factories.UserFactory(), + factories.UserFactory(), + factories.UserFactory(), + factories.UserFactory(), + } + + userOut := []types.UserOut{} + + for _, user := range create { + usrOut, _ := mockHandler.svc.Admin.Create(context.Background(), user) + userOut = append(userOut, usrOut) + } + + users = userOut + + purge := func() { + mockHandler.svc.Admin.DeleteAll(context.Background()) + } + + return purge +} + +func TestMain(m *testing.M) { + // Set Handler Vars + mockHandler.log = mocks.GetStructLogger() + repos, closeDb := mocks.GetEntRepos() + mockHandler.svc = mocks.GetMockServices(repos) + + defer closeDb() + + purge := userPool() + defer purge() + + m.Run() +} diff --git a/backend/app/api/v1/v1_ctrl_admin.go b/backend/app/api/v1/v1_ctrl_admin.go new file mode 100644 index 0000000..81afd43 --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_admin.go @@ -0,0 +1,207 @@ +package v1 + +import ( + "errors" + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + 
"github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +// HandleAdminUserGetAll godoc +// @Summary Gets all users from the database +// @Tags Admin: Users +// @Produce json +// @Success 200 {object} server.Result{item=[]types.UserOut} +// @Router /v1/admin/users [get] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserGetAll() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + users, err := ctrl.svc.Admin.GetAll(r.Context()) + + if err != nil { + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + server.Respond(w, http.StatusOK, server.Wrap(users)) + } +} + +// HandleAdminUserGet godoc +// @Summary Get a user from the database +// @Tags Admin: Users +// @Produce json +// @Param id path string true "User ID" +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/admin/users/{id} [get] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserGet() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "scope": "admin", + "details": "failed to convert id to valid UUID", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + user, err := ctrl.svc.Admin.GetByID(r.Context(), uid) + + if err != nil { + ctrl.log.Error(err, nil) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + server.Respond(w, http.StatusOK, server.Wrap(user)) + + } +} + +// HandleAdminUserCreate godoc +// @Summary Create a new user +// @Tags Admin: Users +// @Produce json +// @Param payload body types.UserCreate true "User Data" +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/admin/users [POST] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserCreate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + createData := types.UserCreate{} + + if err := 
server.Decode(r, &createData); err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to decode user create data", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + err := createData.Validate() + + if err != nil { + server.RespondError(w, http.StatusUnprocessableEntity, err) + return + } + + hashedPw, err := hasher.HashPassword(createData.Password) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to hash password", + }) + + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + createData.Password = hashedPw + userOut, err := ctrl.svc.Admin.Create(r.Context(), createData) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to create user", + }) + + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + server.Respond(w, http.StatusCreated, server.Wrap(userOut)) + } +} + +// HandleAdminUserUpdate godoc +// @Summary Update a User +// @Tags Admin: Users +// @Param id path string true "User ID" +// @Param payload body types.UserUpdate true "User Data" +// @Produce json +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/admin/users/{id} [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserUpdate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "scope": "admin", + "details": "failed to convert id to valid UUID", + }) + } + + updateData := types.UserUpdate{} + + if err := server.Decode(r, &updateData); err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to decode user update data", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + newData, err := ctrl.svc.Admin.UpdateProperties(r.Context(), uid, updateData) + + if err != nil { + 
ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to update user", + }) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + server.Respond(w, http.StatusOK, server.Wrap(newData)) + } +} + +// HandleAdminUserDelete godoc +// @Summary Delete a User +// @Tags Admin: Users +// @Param id path string true "User ID" +// @Produce json +// @Success 204 +// @Router /v1/admin/users/{id} [DELETE] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserDelete() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "scope": "admin", + "details": "failed to convert id to valid UUID", + }) + } + + actor := services.UseUserCtx(r.Context()) + + if actor.ID == uid { + server.RespondError(w, http.StatusBadRequest, errors.New("cannot delete yourself")) + return + } + + err = ctrl.svc.Admin.Delete(r.Context(), uid) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to delete user", + }) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + } +} diff --git a/backend/app/api/v1/v1_ctrl_admin_test.go b/backend/app/api/v1/v1_ctrl_admin_test.go new file mode 100644 index 0000000..c0066c7 --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_admin_test.go @@ -0,0 +1,109 @@ +package v1 + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/mocks/chimocker" + "github.com/hay-kot/git-web-template/backend/internal/mocks/factories" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/stretchr/testify/assert" +) + +const ( + UrlUser = "/api/v1/admin/users" + UrlUserId = "/api/v1/admin/users/%v" + UrlUserIdChi = "/api/v1/admin/users/{id}" +) + +type 
usersResponse struct { + Users []types.UserOut `json:"item"` +} + +type userResponse struct { + User types.UserOut `json:"item"` +} + +func Test_HandleAdminUserGetAll_Success(t *testing.T) { + r := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, UrlUser, nil) + + mockHandler.HandleAdminUserGetAll()(r, req) + + response := usersResponse{ + Users: []types.UserOut{}, + } + + _ = json.Unmarshal(r.Body.Bytes(), &response) + assert.Equal(t, http.StatusOK, r.Code) + assert.Equal(t, len(users), len(response.Users)) + + knowEmail := []string{ + users[0].Email, + users[1].Email, + users[2].Email, + users[3].Email, + } + + for _, user := range users { + assert.Contains(t, knowEmail, user.Email) + } + +} + +func Test_HandleAdminUserGet_Success(t *testing.T) { + targetUser := users[2] + res := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, fmt.Sprintf(UrlUserId, targetUser.ID), nil) + + req = chimocker.WithUrlParam(req, "id", fmt.Sprintf("%v", targetUser.ID)) + + mockHandler.HandleAdminUserGet()(res, req) + assert.Equal(t, http.StatusOK, res.Code) + + response := userResponse{ + User: types.UserOut{}, + } + + _ = json.Unmarshal(res.Body.Bytes(), &response) + assert.Equal(t, targetUser.ID, response.User.ID) +} + +func Test_HandleAdminUserCreate_Success(t *testing.T) { + payload := factories.UserFactory() + + r := httptest.NewRecorder() + + body, err := json.Marshal(payload) + assert.NoError(t, err) + + req := httptest.NewRequest(http.MethodGet, UrlUser, bytes.NewBuffer(body)) + req.Header.Set(server.ContentType, server.ContentJSON) + + mockHandler.HandleAdminUserCreate()(r, req) + + assert.Equal(t, http.StatusCreated, r.Code) + + usr, err := mockHandler.svc.Admin.GetByEmail(context.Background(), payload.Email) + + assert.NoError(t, err) + assert.Equal(t, payload.Email, usr.Email) + assert.Equal(t, payload.Name, usr.Name) + assert.NotEqual(t, payload.Password, usr.Password) // smoke test - check password is hashed + + _ = 
mockHandler.svc.Admin.Delete(context.Background(), usr.ID) +} + +func Test_HandleAdminUserUpdate_Success(t *testing.T) { + t.Skip() +} + +func Test_HandleAdminUserUpdate_Delete(t *testing.T) { + t.Skip() +} diff --git a/backend/app/api/v1/v1_ctrl_auth.go b/backend/app/api/v1/v1_ctrl_auth.go new file mode 100644 index 0000000..f204e1e --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_auth.go @@ -0,0 +1,136 @@ +package v1 + +import ( + "errors" + "net/http" + + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +var ( + HeaderFormData = "application/x-www-form-urlencoded" + HeaderJSON = "application/json" +) + +// HandleAuthLogin godoc +// @Summary User Login +// @Tags Authentication +// @Accept x-www-form-urlencoded +// @Accept application/json +// @Param username formData string false "string" example(admin@admin.com) +// @Param password formData string false "string" example(admin) +// @Produce json +// @Success 200 {object} types.TokenResponse +// @Router /v1/users/login [POST] +func (ctrl *V1Controller) HandleAuthLogin() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + loginForm := &types.LoginForm{} + + if r.Header.Get("Content-Type") == HeaderFormData { + err := r.ParseForm() + + if err != nil { + server.Respond(w, http.StatusBadRequest, server.Wrap(err)) + return + } + + loginForm.Username = r.PostFormValue("username") + loginForm.Password = r.PostFormValue("password") + } else if r.Header.Get("Content-Type") == HeaderJSON { + err := server.Decode(r, loginForm) + + if err != nil { + server.Respond(w, http.StatusBadRequest, server.Wrap(err)) + return + } + } else { + server.Respond(w, http.StatusBadRequest, errors.New("invalid content type")) + return + } + + if loginForm.Username == "" || loginForm.Password == "" { + 
server.RespondError(w, http.StatusBadRequest, errors.New("username and password are required")) + return + } + + newToken, err := ctrl.svc.User.Login(r.Context(), loginForm.Username, loginForm.Password) + + if err != nil { + server.RespondError(w, http.StatusUnauthorized, err) + return + } + + err = server.Respond(w, http.StatusOK, types.TokenResponse{ + BearerToken: "Bearer " + newToken.Raw, + ExpiresAt: newToken.ExpiresAt, + }) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "user": loginForm.Username, + }) + return + } + } +} + +// HandleAuthLogout godoc +// @Summary User Logout +// @Tags Authentication +// @Success 204 +// @Router /v1/users/logout [POST] +// @Security Bearer +func (ctrl *V1Controller) HandleAuthLogout() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + token := services.UseTokenCtx(r.Context()) + + if token == "" { + server.RespondError(w, http.StatusUnauthorized, errors.New("no token within request context")) + return + } + + err := ctrl.svc.User.Logout(r.Context(), token) + + if err != nil { + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + err = server.Respond(w, http.StatusNoContent, nil) + } +} + +// HandleAuthLogout godoc +// @Summary User Token Refresh +// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token. +// @Description This does not validate that the user still exists within the database. 
+// @Tags Authentication +// @Success 200 +// @Router /v1/users/refresh [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleAuthRefresh() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + requestToken := services.UseTokenCtx(r.Context()) + + if requestToken == "" { + server.RespondError(w, http.StatusUnauthorized, errors.New("no user token found")) + return + } + + newToken, err := ctrl.svc.User.RenewToken(r.Context(), requestToken) + + if err != nil { + server.RespondUnauthorized(w) + return + } + + err = server.Respond(w, http.StatusOK, newToken) + + if err != nil { + return + } + } +} diff --git a/backend/app/api/v1/v1_ctrl_user.go b/backend/app/api/v1/v1_ctrl_user.go new file mode 100644 index 0000000..aed64b6 --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_user.go @@ -0,0 +1,80 @@ +package v1 + +import ( + "errors" + "net/http" + + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +// HandleUserSelf godoc +// @Summary Get the current user +// @Tags User +// @Produce json +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/users/self [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleUserSelf() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + token := services.UseTokenCtx(r.Context()) + usr, err := ctrl.svc.User.GetSelf(r.Context(), token) + if usr.IsNull() || err != nil { + ctrl.log.Error(errors.New("no user within request context"), nil) + server.RespondInternalServerError(w) + return + } + + _ = server.Respond(w, http.StatusOK, server.Wrap(usr)) + } +} + +// HandleUserUpdate godoc +// @Summary Update the current user +// @Tags User +// @Produce json +// @Param payload body types.UserUpdate true "User Data" +// @Success 200 {object} 
server.Result{item=types.UserUpdate} +// @Router /v1/users/self [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleUserUpdate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + updateData := types.UserUpdate{} + if err := server.Decode(r, &updateData); err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "user", + "details": "failed to decode user update data", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + actor := services.UseUserCtx(r.Context()) + newData, err := ctrl.svc.User.UpdateSelf(r.Context(), actor.ID, updateData) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "user", + "details": "failed to update user", + }) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + _ = server.Respond(w, http.StatusOK, server.Wrap(newData)) + } +} + +// HandleUserUpdatePassword godoc +// @Summary Update the current user's password // TODO: +// @Tags User +// @Produce json +// @Success 204 +// @Router /v1/users/self/password [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleUserUpdatePassword() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} diff --git a/backend/app/cli/app.go b/backend/app/cli/app.go new file mode 100644 index 0000000..dd31ed9 --- /dev/null +++ b/backend/app/cli/app.go @@ -0,0 +1,9 @@ +package main + +import ( + "github.com/hay-kot/git-web-template/backend/internal/repo" +) + +type app struct { + repos *repo.AllRepos +} diff --git a/backend/app/cli/app_users.go b/backend/app/cli/app_users.go new file mode 100644 index 0000000..c13ac29 --- /dev/null +++ b/backend/app/cli/app_users.go @@ -0,0 +1,105 @@ +package main + +import ( + "context" + "fmt" + "os" + "text/tabwriter" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/app/cli/reader" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + 
"github.com/urfave/cli/v2" +) + +func (a *app) UserCreate(c *cli.Context) error { + var defaultValidators = []reader.StringValidator{ + reader.StringRequired, + reader.StringNoLeadingOrTrailingWhitespace, + } + // Get Flags + name := reader.ReadString("Name: ", + defaultValidators..., + ) + password := reader.ReadString("Password: ", + defaultValidators..., + ) + + email := reader.ReadString("Email: ", + reader.StringRequired, + reader.StringNoLeadingOrTrailingWhitespace, + reader.StringContainsAt, + ) + isSuper := reader.ReadBool("Is Superuser?") + + pwHash, err := hasher.HashPassword(password) + if err != nil { + return err + } + + usr := types.UserCreate{ + Name: name, + Email: email, + Password: pwHash, + IsSuperuser: isSuper, + } + + _, err = a.repos.Users.Create(context.Background(), usr) + + if err == nil { + fmt.Println("Super user created") + } + return err +} + +func (a *app) UserDelete(c *cli.Context) error { + // Get Flags + id := c.String("id") + uid := uuid.MustParse(id) + + fmt.Printf("Deleting user with id: %s\n", id) + + // Confirm Action + fmt.Printf("Are you sure you want to delete this user? 
(y/n) ") + var answer string + _, err := fmt.Scanln(&answer) + if answer != "y" || err != nil { + fmt.Println("Aborting") + return nil + } + + err = a.repos.Users.Delete(context.Background(), uid) + + if err == nil { + fmt.Printf("%v User(s) deleted (id=%v)\n", 1, id) + } + return err +} + +func (a *app) UserList(c *cli.Context) error { + fmt.Println("Superuser List") + + users, err := a.repos.Users.GetAll(context.Background()) + + if err != nil { + return err + } + + tabWriter := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) + defer func(tabWriter *tabwriter.Writer) { + _ = tabWriter.Flush() + }(tabWriter) + + _, err = fmt.Fprintln(tabWriter, "Id\tName\tEmail\tIsSuper") + + if err != nil { + return err + } + + for _, u := range users { + _, _ = fmt.Fprintf(tabWriter, "%v\t%s\t%s\t%v\n", u.ID, u.Name, u.Email, u.IsSuperuser) + } + + return nil +} diff --git a/backend/app/cli/main.go b/backend/app/cli/main.go new file mode 100644 index 0000000..d778b1f --- /dev/null +++ b/backend/app/cli/main.go @@ -0,0 +1,82 @@ +package main + +import ( + "context" + "log" + "os" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/repo" + _ "github.com/mattn/go-sqlite3" + + "github.com/urfave/cli/v2" +) + +func main() { + cfg, err := config.NewConfig("config.yml") + + if err != nil { + panic(err) + } + + if err := run(cfg); err != nil { + log.Fatal(err) + } +} + +func run(cfg *config.Config) error { + // ========================================================================= + // Initialize Database + c, err := ent.Open(cfg.Database.GetDriver(), cfg.Database.GetUrl()) + if err != nil { + log.Fatalf("failed opening connection to sqlite: %v", err) + } + defer func(c *ent.Client) { + _ = c.Close() + }(c) + if err := c.Schema.Create(context.Background()); err != nil { + log.Fatalf("failed creating schema resources: %v", err) + } + + // Create App + a := &app{ + 
repos: repo.EntAllRepos(c), + } + + app := &cli.App{ + Commands: []*cli.Command{ + { + Name: "users", + Aliases: []string{"u"}, + Usage: "options to manage users", + Subcommands: []*cli.Command{ + { + Name: "list", + Usage: "list users in database", + Action: a.UserList, + }, + { + Name: "add", + Usage: "add a new user", + Action: a.UserCreate, + }, + { + Name: "delete", + Usage: "delete user in database", + Action: a.UserDelete, + Flags: []cli.Flag{ + &cli.IntFlag{ + Name: "id", + Usage: "name of the user to add", + Required: true, + }, + }, + }, + }, + }, + }, + } + + return app.Run(os.Args) +} diff --git a/backend/app/cli/reader/reader.go b/backend/app/cli/reader/reader.go new file mode 100644 index 0000000..a8cc92c --- /dev/null +++ b/backend/app/cli/reader/reader.go @@ -0,0 +1,65 @@ +package reader + +import "fmt" + +type StringValidator func(s string) bool + +func StringRequired(s string) bool { + return s != "" +} + +func StringNoLeadingOrTrailingWhitespace(s string) bool { + return s != "" && len(s) > 0 && s[0] != ' ' && s[len(s)-1] != ' ' +} + +func StringContainsAt(s string) bool { + for _, c := range s { + if c == '@' { + return true + } + } + return false +} + +func ReadString(message string, sv ...StringValidator) string { + for { + fmt.Print(message) + var input string + fmt.Scanln(&input) + + if len(sv) == 0 { + return input + } + + isValid := true + for _, validator := range sv { + if !validator(input) { + isValid = false + fmt.Println("Invalid input") + continue + } + + } + + if isValid { + return input + } + + } +} + +func ReadBool(message string) bool { + for { + fmt.Print(message + " (y/n) ") + var input string + fmt.Scanln(&input) + + if input == "y" { + return true + } else if input == "n" { + return false + } else { + fmt.Println("Invalid input") + } + } +} diff --git a/backend/app/generator/main.go b/backend/app/generator/main.go new file mode 100644 index 0000000..d1d6efb --- /dev/null +++ b/backend/app/generator/main.go @@ -0,0 +1,72 @@ 
+package main + +import ( + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/automapper" + "github.com/tkrajina/typescriptify-golang-structs/typescriptify" +) + +// generateMappers serialized the config file into a list of automapper struct +func generateMappers() []automapper.AutoMapper { + return []automapper.AutoMapper{ + { + Package: "mapper", + Prefix: "users", + Name: "User Out", + Schema: automapper.Schema{ + Type: types.UserOut{}, + Prefix: "types", + }, + Model: automapper.Model{ + Type: ent.User{}, + Prefix: "ent", + }, + Imports: []string{}, + }, + } +} + +func generateTypeScript() { + // Configuration + converter := typescriptify.New() + converter.CreateInterface = true + converter.ManageType(uuid.UUID{}, typescriptify.TypeOptions{TSType: "string"}) + converter.ManageType(time.Time{}, typescriptify.TypeOptions{TSType: "Date", TSTransform: "new Date(__VALUE__)"}) + + // General + public := []any{ + // Base Types + types.ApiSummary{}, + + // User Types + types.UserOut{}, + types.UserCreate{}, + types.UserIn{}, + types.UserUpdate{}, + + // Auth Types + types.LoginForm{}, + types.TokenResponse{}, + } + + for i := 0; i < len(public); i++ { + converter.Add(public[i]) + } + + // Creation + converter.ConvertToFile("./generated-types.ts") + +} + +func main() { + automappers := generateMappers() + conf := automapper.DefaultConf() + + automapper.Generate(automappers, conf) + + generateTypeScript() +} diff --git a/backend/config.template.yml b/backend/config.template.yml new file mode 100644 index 0000000..0dc2626 --- /dev/null +++ b/backend/config.template.yml @@ -0,0 +1,31 @@ +--- +mode: development +swagger: + host: localhost:7745 + scheme: http +web: + port: 3915 + host: 127.0.0.1 +database: + driver: sqlite3 + sqlite-url: ./ent.db?_fk=1 +logger: + level: debug + file: api.log +mailer: + host: smtp.example.com 
+ port: 465 + username: + password: + from: example@email.com +seed: + enabled: true + users: + - name: Admin + email: admin@admin.com + password: admin + isSuperuser: true + - name: User + email: user@user.com + password: user + isSuperuser: false diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml new file mode 100644 index 0000000..298153c --- /dev/null +++ b/backend/docker-compose.yml @@ -0,0 +1,10 @@ +version: "3.4" + +services: + gowebtemplate: + image: gowebtemplate + build: + context: . + dockerfile: ./Dockerfile + ports: + - 3001:7745 diff --git a/backend/ent/authtokens.go b/backend/ent/authtokens.go new file mode 100644 index 0000000..ecf611c --- /dev/null +++ b/backend/ent/authtokens.go @@ -0,0 +1,165 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokens is the model entity for the AuthTokens schema. +type AuthTokens struct { + config `json:"-"` + // ID of the ent. + ID int `json:"id,omitempty"` + // Token holds the value of the "token" field. + Token []byte `json:"token,omitempty"` + // ExpiresAt holds the value of the "expires_at" field. + ExpiresAt time.Time `json:"expires_at,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the AuthTokensQuery when eager-loading is set. + Edges AuthTokensEdges `json:"edges"` + user_auth_tokens *uuid.UUID +} + +// AuthTokensEdges holds the relations/edges for other nodes in the graph. +type AuthTokensEdges struct { + // User holds the value of the user edge. 
+ User *User `json:"user,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [1]bool +} + +// UserOrErr returns the User value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e AuthTokensEdges) UserOrErr() (*User, error) { + if e.loadedTypes[0] { + if e.User == nil { + // The edge user was loaded in eager-loading, + // but was not found. + return nil, &NotFoundError{label: user.Label} + } + return e.User, nil + } + return nil, &NotLoadedError{edge: "user"} +} + +// scanValues returns the types for scanning values from sql.Rows. +func (*AuthTokens) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case authtokens.FieldToken: + values[i] = new([]byte) + case authtokens.FieldID: + values[i] = new(sql.NullInt64) + case authtokens.FieldExpiresAt, authtokens.FieldCreatedAt: + values[i] = new(sql.NullTime) + case authtokens.ForeignKeys[0]: // user_auth_tokens + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + default: + return nil, fmt.Errorf("unexpected column %q for type AuthTokens", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the AuthTokens fields. 
+func (at *AuthTokens) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case authtokens.FieldID: + value, ok := values[i].(*sql.NullInt64) + if !ok { + return fmt.Errorf("unexpected type %T for field id", value) + } + at.ID = int(value.Int64) + case authtokens.FieldToken: + if value, ok := values[i].(*[]byte); !ok { + return fmt.Errorf("unexpected type %T for field token", values[i]) + } else if value != nil { + at.Token = *value + } + case authtokens.FieldExpiresAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field expires_at", values[i]) + } else if value.Valid { + at.ExpiresAt = value.Time + } + case authtokens.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + at.CreatedAt = value.Time + } + case authtokens.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field user_auth_tokens", values[i]) + } else if value.Valid { + at.user_auth_tokens = new(uuid.UUID) + *at.user_auth_tokens = *value.S.(*uuid.UUID) + } + } + } + return nil +} + +// QueryUser queries the "user" edge of the AuthTokens entity. +func (at *AuthTokens) QueryUser() *UserQuery { + return (&AuthTokensClient{config: at.config}).QueryUser(at) +} + +// Update returns a builder for updating this AuthTokens. +// Note that you need to call AuthTokens.Unwrap() before calling this method if this AuthTokens +// was returned from a transaction, and the transaction was committed or rolled back. 
+func (at *AuthTokens) Update() *AuthTokensUpdateOne { + return (&AuthTokensClient{config: at.config}).UpdateOne(at) +} + +// Unwrap unwraps the AuthTokens entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (at *AuthTokens) Unwrap() *AuthTokens { + tx, ok := at.config.driver.(*txDriver) + if !ok { + panic("ent: AuthTokens is not a transactional entity") + } + at.config.driver = tx.drv + return at +} + +// String implements the fmt.Stringer. +func (at *AuthTokens) String() string { + var builder strings.Builder + builder.WriteString("AuthTokens(") + builder.WriteString(fmt.Sprintf("id=%v", at.ID)) + builder.WriteString(", token=") + builder.WriteString(fmt.Sprintf("%v", at.Token)) + builder.WriteString(", expires_at=") + builder.WriteString(at.ExpiresAt.Format(time.ANSIC)) + builder.WriteString(", created_at=") + builder.WriteString(at.CreatedAt.Format(time.ANSIC)) + builder.WriteByte(')') + return builder.String() +} + +// AuthTokensSlice is a parsable slice of AuthTokens. +type AuthTokensSlice []*AuthTokens + +func (at AuthTokensSlice) config(cfg config) { + for _i := range at { + at[_i].config = cfg + } +} diff --git a/backend/ent/authtokens/authtokens.go b/backend/ent/authtokens/authtokens.go new file mode 100644 index 0000000..5c10d3a --- /dev/null +++ b/backend/ent/authtokens/authtokens.go @@ -0,0 +1,67 @@ +// Code generated by entc, DO NOT EDIT. + +package authtokens + +import ( + "time" +) + +const ( + // Label holds the string label denoting the authtokens type in the database. + Label = "auth_tokens" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldToken holds the string denoting the token field in the database. + FieldToken = "token" + // FieldExpiresAt holds the string denoting the expires_at field in the database. 
+ FieldExpiresAt = "expires_at" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // EdgeUser holds the string denoting the user edge name in mutations. + EdgeUser = "user" + // Table holds the table name of the authtokens in the database. + Table = "auth_tokens" + // UserTable is the table that holds the user relation/edge. + UserTable = "auth_tokens" + // UserInverseTable is the table name for the User entity. + // It exists in this package in order to avoid circular dependency with the "user" package. + UserInverseTable = "users" + // UserColumn is the table column denoting the user relation/edge. + UserColumn = "user_auth_tokens" +) + +// Columns holds all SQL columns for authtokens fields. +var Columns = []string{ + FieldID, + FieldToken, + FieldExpiresAt, + FieldCreatedAt, +} + +// ForeignKeys holds the SQL foreign-keys that are owned by the "auth_tokens" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "user_auth_tokens", +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } + return false +} + +var ( + // DefaultExpiresAt holds the default value on creation for the "expires_at" field. + DefaultExpiresAt func() time.Time + // DefaultCreatedAt holds the default value on creation for the "created_at" field. + DefaultCreatedAt func() time.Time +) diff --git a/backend/ent/authtokens/where.go b/backend/ent/authtokens/where.go new file mode 100644 index 0000000..5fda3f0 --- /dev/null +++ b/backend/ent/authtokens/where.go @@ -0,0 +1,403 @@ +// Code generated by entc, DO NOT EDIT. 
+ +package authtokens + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/hay-kot/git-web-template/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. 
+func IDGTE(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// Token applies equality check predicate on the "token" field. It's identical to TokenEQ. +func Token(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldToken), v)) + }) +} + +// ExpiresAt applies equality check predicate on the "expires_at" field. It's identical to ExpiresAtEQ. +func ExpiresAt(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldExpiresAt), v)) + }) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// TokenEQ applies the EQ predicate on the "token" field. +func TokenEQ(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldToken), v)) + }) +} + +// TokenNEQ applies the NEQ predicate on the "token" field. +func TokenNEQ(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldToken), v)) + }) +} + +// TokenIn applies the In predicate on the "token" field. 
+func TokenIn(vs ...[]byte) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldToken), v...)) + }) +} + +// TokenNotIn applies the NotIn predicate on the "token" field. +func TokenNotIn(vs ...[]byte) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldToken), v...)) + }) +} + +// TokenGT applies the GT predicate on the "token" field. +func TokenGT(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldToken), v)) + }) +} + +// TokenGTE applies the GTE predicate on the "token" field. +func TokenGTE(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldToken), v)) + }) +} + +// TokenLT applies the LT predicate on the "token" field. +func TokenLT(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldToken), v)) + }) +} + +// TokenLTE applies the LTE predicate on the "token" field. +func TokenLTE(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldToken), v)) + }) +} + +// ExpiresAtEQ applies the EQ predicate on the "expires_at" field. 
+func ExpiresAtEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtNEQ applies the NEQ predicate on the "expires_at" field. +func ExpiresAtNEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtIn applies the In predicate on the "expires_at" field. +func ExpiresAtIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldExpiresAt), v...)) + }) +} + +// ExpiresAtNotIn applies the NotIn predicate on the "expires_at" field. +func ExpiresAtNotIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldExpiresAt), v...)) + }) +} + +// ExpiresAtGT applies the GT predicate on the "expires_at" field. +func ExpiresAtGT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtGTE applies the GTE predicate on the "expires_at" field. +func ExpiresAtGTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtLT applies the LT predicate on the "expires_at" field. 
+func ExpiresAtLT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtLTE applies the LTE predicate on the "expires_at" field. +func ExpiresAtLTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldExpiresAt), v)) + }) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. 
+func CreatedAtGT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// HasUser applies the HasEdge predicate on the "user" edge. +func HasUser() predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UserTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasUserWith applies the HasEdge predicate on the "user" edge with a given conditions (other predicates). +func HasUserWith(preds ...predicate.User) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UserInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. 
+func And(predicates ...predicate.AuthTokens) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.AuthTokens) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.AuthTokens) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/authtokens_create.go b/backend/ent/authtokens_create.go new file mode 100644 index 0000000..4cca125 --- /dev/null +++ b/backend/ent/authtokens_create.go @@ -0,0 +1,326 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokensCreate is the builder for creating a AuthTokens entity. +type AuthTokensCreate struct { + config + mutation *AuthTokensMutation + hooks []Hook +} + +// SetToken sets the "token" field. +func (atc *AuthTokensCreate) SetToken(b []byte) *AuthTokensCreate { + atc.mutation.SetToken(b) + return atc +} + +// SetExpiresAt sets the "expires_at" field. +func (atc *AuthTokensCreate) SetExpiresAt(t time.Time) *AuthTokensCreate { + atc.mutation.SetExpiresAt(t) + return atc +} + +// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil. 
+func (atc *AuthTokensCreate) SetNillableExpiresAt(t *time.Time) *AuthTokensCreate { + if t != nil { + atc.SetExpiresAt(*t) + } + return atc +} + +// SetCreatedAt sets the "created_at" field. +func (atc *AuthTokensCreate) SetCreatedAt(t time.Time) *AuthTokensCreate { + atc.mutation.SetCreatedAt(t) + return atc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableCreatedAt(t *time.Time) *AuthTokensCreate { + if t != nil { + atc.SetCreatedAt(*t) + } + return atc +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (atc *AuthTokensCreate) SetUserID(id uuid.UUID) *AuthTokensCreate { + atc.mutation.SetUserID(id) + return atc +} + +// SetNillableUserID sets the "user" edge to the User entity by ID if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableUserID(id *uuid.UUID) *AuthTokensCreate { + if id != nil { + atc = atc.SetUserID(*id) + } + return atc +} + +// SetUser sets the "user" edge to the User entity. +func (atc *AuthTokensCreate) SetUser(u *User) *AuthTokensCreate { + return atc.SetUserID(u.ID) +} + +// Mutation returns the AuthTokensMutation object of the builder. +func (atc *AuthTokensCreate) Mutation() *AuthTokensMutation { + return atc.mutation +} + +// Save creates the AuthTokens in the database. 
+func (atc *AuthTokensCreate) Save(ctx context.Context) (*AuthTokens, error) { + var ( + err error + node *AuthTokens + ) + atc.defaults() + if len(atc.hooks) == 0 { + if err = atc.check(); err != nil { + return nil, err + } + node, err = atc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = atc.check(); err != nil { + return nil, err + } + atc.mutation = mutation + if node, err = atc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(atc.hooks) - 1; i >= 0; i-- { + if atc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atc.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atc.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. +func (atc *AuthTokensCreate) SaveX(ctx context.Context) *AuthTokens { + v, err := atc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (atc *AuthTokensCreate) Exec(ctx context.Context) error { + _, err := atc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atc *AuthTokensCreate) ExecX(ctx context.Context) { + if err := atc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (atc *AuthTokensCreate) defaults() { + if _, ok := atc.mutation.ExpiresAt(); !ok { + v := authtokens.DefaultExpiresAt() + atc.mutation.SetExpiresAt(v) + } + if _, ok := atc.mutation.CreatedAt(); !ok { + v := authtokens.DefaultCreatedAt() + atc.mutation.SetCreatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (atc *AuthTokensCreate) check() error { + if _, ok := atc.mutation.Token(); !ok { + return &ValidationError{Name: "token", err: errors.New(`ent: missing required field "AuthTokens.token"`)} + } + if _, ok := atc.mutation.ExpiresAt(); !ok { + return &ValidationError{Name: "expires_at", err: errors.New(`ent: missing required field "AuthTokens.expires_at"`)} + } + if _, ok := atc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "AuthTokens.created_at"`)} + } + return nil +} + +func (atc *AuthTokensCreate) sqlSave(ctx context.Context) (*AuthTokens, error) { + _node, _spec := atc.createSpec() + if err := sqlgraph.CreateNode(ctx, atc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + id := _spec.ID.Value.(int64) + _node.ID = int(id) + return _node, nil +} + +func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) { + var ( + _node = &AuthTokens{config: atc.config} + _spec = &sqlgraph.CreateSpec{ + Table: authtokens.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + } + ) + if value, ok := atc.mutation.Token(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeBytes, + Value: value, + Column: authtokens.FieldToken, + }) + _node.Token = value + } + if value, ok := atc.mutation.ExpiresAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldExpiresAt, + }) + _node.ExpiresAt = value + } + if value, ok := atc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if nodes := atc.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: 
authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.user_auth_tokens = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// AuthTokensCreateBulk is the builder for creating many AuthTokens entities in bulk. +type AuthTokensCreateBulk struct { + config + builders []*AuthTokensCreate +} + +// Save creates the AuthTokens entities in the database. +func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, error) { + specs := make([]*sqlgraph.CreateSpec, len(atcb.builders)) + nodes := make([]*AuthTokens, len(atcb.builders)) + mutators := make([]Mutator, len(atcb.builders)) + for i := range atcb.builders { + func(i int, root context.Context) { + builder := atcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, atcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. 
+ if err = sqlgraph.BatchCreate(ctx, atcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + if specs[i].ID.Value != nil { + id := specs[i].ID.Value.(int64) + nodes[i].ID = int(id) + } + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, atcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (atcb *AuthTokensCreateBulk) SaveX(ctx context.Context) []*AuthTokens { + v, err := atcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (atcb *AuthTokensCreateBulk) Exec(ctx context.Context) error { + _, err := atcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atcb *AuthTokensCreateBulk) ExecX(ctx context.Context) { + if err := atcb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/authtokens_delete.go b/backend/ent/authtokens_delete.go new file mode 100644 index 0000000..123ee17 --- /dev/null +++ b/backend/ent/authtokens_delete.go @@ -0,0 +1,111 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" +) + +// AuthTokensDelete is the builder for deleting a AuthTokens entity. +type AuthTokensDelete struct { + config + hooks []Hook + mutation *AuthTokensMutation +} + +// Where appends a list predicates to the AuthTokensDelete builder. 
+func (atd *AuthTokensDelete) Where(ps ...predicate.AuthTokens) *AuthTokensDelete { + atd.mutation.Where(ps...) + return atd +} + +// Exec executes the deletion query and returns how many vertices were deleted. +func (atd *AuthTokensDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(atd.hooks) == 0 { + affected, err = atd.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + atd.mutation = mutation + affected, err = atd.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(atd.hooks) - 1; i >= 0; i-- { + if atd.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atd.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atd.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atd *AuthTokensDelete) ExecX(ctx context.Context) int { + n, err := atd.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + if ps := atd.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return sqlgraph.DeleteNodes(ctx, atd.driver, _spec) +} + +// AuthTokensDeleteOne is the builder for deleting a single AuthTokens entity. +type AuthTokensDeleteOne struct { + atd *AuthTokensDelete +} + +// Exec executes the deletion query. 
+func (atdo *AuthTokensDeleteOne) Exec(ctx context.Context) error { + n, err := atdo.atd.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{authtokens.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (atdo *AuthTokensDeleteOne) ExecX(ctx context.Context) { + atdo.atd.ExecX(ctx) +} diff --git a/backend/ent/authtokens_query.go b/backend/ent/authtokens_query.go new file mode 100644 index 0000000..9e309c8 --- /dev/null +++ b/backend/ent/authtokens_query.go @@ -0,0 +1,1000 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokensQuery is the builder for querying AuthTokens entities. +type AuthTokensQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.AuthTokens + // eager-loading edges. + withUser *UserQuery + withFKs bool + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the AuthTokensQuery builder. +func (atq *AuthTokensQuery) Where(ps ...predicate.AuthTokens) *AuthTokensQuery { + atq.predicates = append(atq.predicates, ps...) + return atq +} + +// Limit adds a limit step to the query. +func (atq *AuthTokensQuery) Limit(limit int) *AuthTokensQuery { + atq.limit = &limit + return atq +} + +// Offset adds an offset step to the query. +func (atq *AuthTokensQuery) Offset(offset int) *AuthTokensQuery { + atq.offset = &offset + return atq +} + +// Unique configures the query builder to filter duplicate records on query. 
+// By default, unique is set to true, and can be disabled using this method. +func (atq *AuthTokensQuery) Unique(unique bool) *AuthTokensQuery { + atq.unique = &unique + return atq +} + +// Order adds an order step to the query. +func (atq *AuthTokensQuery) Order(o ...OrderFunc) *AuthTokensQuery { + atq.order = append(atq.order, o...) + return atq +} + +// QueryUser chains the current query on the "user" edge. +func (atq *AuthTokensQuery) QueryUser() *UserQuery { + query := &UserQuery{config: atq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := atq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := atq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(authtokens.Table, authtokens.FieldID, selector), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, authtokens.UserTable, authtokens.UserColumn), + ) + fromU = sqlgraph.SetNeighbors(atq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first AuthTokens entity from the query. +// Returns a *NotFoundError when no AuthTokens was found. +func (atq *AuthTokensQuery) First(ctx context.Context) (*AuthTokens, error) { + nodes, err := atq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{authtokens.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (atq *AuthTokensQuery) FirstX(ctx context.Context) *AuthTokens { + node, err := atq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first AuthTokens ID from the query. +// Returns a *NotFoundError when no AuthTokens ID was found. 
+func (atq *AuthTokensQuery) FirstID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = atq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{authtokens.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (atq *AuthTokensQuery) FirstIDX(ctx context.Context) int { + id, err := atq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single AuthTokens entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when exactly one AuthTokens entity is not found. +// Returns a *NotFoundError when no AuthTokens entities are found. +func (atq *AuthTokensQuery) Only(ctx context.Context) (*AuthTokens, error) { + nodes, err := atq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{authtokens.Label} + default: + return nil, &NotSingularError{authtokens.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (atq *AuthTokensQuery) OnlyX(ctx context.Context) *AuthTokens { + node, err := atq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only AuthTokens ID in the query. +// Returns a *NotSingularError when exactly one AuthTokens ID is not found. +// Returns a *NotFoundError when no entities are found. +func (atq *AuthTokensQuery) OnlyID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = atq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = &NotSingularError{authtokens.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. 
+func (atq *AuthTokensQuery) OnlyIDX(ctx context.Context) int { + id, err := atq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of AuthTokensSlice. +func (atq *AuthTokensQuery) All(ctx context.Context) ([]*AuthTokens, error) { + if err := atq.prepareQuery(ctx); err != nil { + return nil, err + } + return atq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (atq *AuthTokensQuery) AllX(ctx context.Context) []*AuthTokens { + nodes, err := atq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of AuthTokens IDs. +func (atq *AuthTokensQuery) IDs(ctx context.Context) ([]int, error) { + var ids []int + if err := atq.Select(authtokens.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (atq *AuthTokensQuery) IDsX(ctx context.Context) []int { + ids, err := atq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (atq *AuthTokensQuery) Count(ctx context.Context) (int, error) { + if err := atq.prepareQuery(ctx); err != nil { + return 0, err + } + return atq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (atq *AuthTokensQuery) CountX(ctx context.Context) int { + count, err := atq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (atq *AuthTokensQuery) Exist(ctx context.Context) (bool, error) { + if err := atq.prepareQuery(ctx); err != nil { + return false, err + } + return atq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. 
+func (atq *AuthTokensQuery) ExistX(ctx context.Context) bool { + exist, err := atq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the AuthTokensQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (atq *AuthTokensQuery) Clone() *AuthTokensQuery { + if atq == nil { + return nil + } + return &AuthTokensQuery{ + config: atq.config, + limit: atq.limit, + offset: atq.offset, + order: append([]OrderFunc{}, atq.order...), + predicates: append([]predicate.AuthTokens{}, atq.predicates...), + withUser: atq.withUser.Clone(), + // clone intermediate query. + sql: atq.sql.Clone(), + path: atq.path, + } +} + +// WithUser tells the query-builder to eager-load the nodes that are connected to +// the "user" edge. The optional arguments are used to configure the query builder of the edge. +func (atq *AuthTokensQuery) WithUser(opts ...func(*UserQuery)) *AuthTokensQuery { + query := &UserQuery{config: atq.config} + for _, opt := range opts { + opt(query) + } + atq.withUser = query + return atq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// Token []byte `json:"token,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.AuthTokens.Query(). +// GroupBy(authtokens.FieldToken). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +// +func (atq *AuthTokensQuery) GroupBy(field string, fields ...string) *AuthTokensGroupBy { + group := &AuthTokensGroupBy{config: atq.config} + group.fields = append([]string{field}, fields...) 
+ group.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := atq.prepareQuery(ctx); err != nil { + return nil, err + } + return atq.sqlQuery(ctx), nil + } + return group +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// Token []byte `json:"token,omitempty"` +// } +// +// client.AuthTokens.Query(). +// Select(authtokens.FieldToken). +// Scan(ctx, &v) +// +func (atq *AuthTokensQuery) Select(fields ...string) *AuthTokensSelect { + atq.fields = append(atq.fields, fields...) + return &AuthTokensSelect{AuthTokensQuery: atq} +} + +func (atq *AuthTokensQuery) prepareQuery(ctx context.Context) error { + for _, f := range atq.fields { + if !authtokens.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if atq.path != nil { + prev, err := atq.path(ctx) + if err != nil { + return err + } + atq.sql = prev + } + return nil +} + +func (atq *AuthTokensQuery) sqlAll(ctx context.Context) ([]*AuthTokens, error) { + var ( + nodes = []*AuthTokens{} + withFKs = atq.withFKs + _spec = atq.querySpec() + loadedTypes = [1]bool{ + atq.withUser != nil, + } + ) + if atq.withUser != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, authtokens.ForeignKeys...) 
+ } + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + node := &AuthTokens{config: atq.config} + nodes = append(nodes, node) + return node.scanValues(columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + if len(nodes) == 0 { + return fmt.Errorf("ent: Assign called without calling ScanValues") + } + node := nodes[len(nodes)-1] + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + if err := sqlgraph.QueryNodes(ctx, atq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + + if query := atq.withUser; query != nil { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*AuthTokens) + for i := range nodes { + if nodes[i].user_auth_tokens == nil { + continue + } + fk := *nodes[i].user_auth_tokens + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(user.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return nil, err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return nil, fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v`, n.ID) + } + for i := range nodes { + nodes[i].Edges.User = n + } + } + } + + return nodes, nil +} + +func (atq *AuthTokensQuery) sqlCount(ctx context.Context) (int, error) { + _spec := atq.querySpec() + _spec.Node.Columns = atq.fields + if len(atq.fields) > 0 { + _spec.Unique = atq.unique != nil && *atq.unique + } + return sqlgraph.CountNodes(ctx, atq.driver, _spec) +} + +func (atq *AuthTokensQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := atq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (atq *AuthTokensQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + Columns: authtokens.Columns, + ID: 
&sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + From: atq.sql, + Unique: true, + } + if unique := atq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := atq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, authtokens.FieldID) + for i := range fields { + if fields[i] != authtokens.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := atq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := atq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := atq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := atq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (atq *AuthTokensQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(atq.driver.Dialect()) + t1 := builder.Table(authtokens.Table) + columns := atq.fields + if len(columns) == 0 { + columns = authtokens.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if atq.sql != nil { + selector = atq.sql + selector.Select(selector.Columns(columns...)...) + } + if atq.unique != nil && *atq.unique { + selector.Distinct() + } + for _, p := range atq.predicates { + p(selector) + } + for _, p := range atq.order { + p(selector) + } + if offset := atq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := atq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// AuthTokensGroupBy is the group-by builder for AuthTokens entities. +type AuthTokensGroupBy struct { + config + fields []string + fns []AggregateFunc + // intermediate query (i.e. 
traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (atgb *AuthTokensGroupBy) Aggregate(fns ...AggregateFunc) *AuthTokensGroupBy { + atgb.fns = append(atgb.fns, fns...) + return atgb +} + +// Scan applies the group-by query and scans the result into the given value. +func (atgb *AuthTokensGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := atgb.path(ctx) + if err != nil { + return err + } + atgb.sql = query + return atgb.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) ScanX(ctx context.Context, v interface{}) { + if err := atgb.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from group-by. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Strings(ctx context.Context) ([]string, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Strings is not achievable when grouping more than 1 field") + } + var v []string + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) StringsX(ctx context.Context) []string { + v, err := atgb.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a group-by query. +// It is only allowed when executing a group-by query with one field. 
+func (atgb *AuthTokensGroupBy) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = atgb.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) StringX(ctx context.Context) string { + v, err := atgb.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from group-by. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Ints(ctx context.Context) ([]int, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Ints is not achievable when grouping more than 1 field") + } + var v []int + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) IntsX(ctx context.Context) []int { + v, err := atgb.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = atgb.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) IntX(ctx context.Context) int { + v, err := atgb.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from group-by. 
+// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Float64s(ctx context.Context) ([]float64, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Float64s is not achievable when grouping more than 1 field") + } + var v []float64 + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) Float64sX(ctx context.Context) []float64 { + v, err := atgb.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = atgb.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) Float64X(ctx context.Context) float64 { + v, err := atgb.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from group-by. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Bools(ctx context.Context) ([]bool, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Bools is not achievable when grouping more than 1 field") + } + var v []bool + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. 
+func (atgb *AuthTokensGroupBy) BoolsX(ctx context.Context) []bool { + v, err := atgb.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = atgb.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) BoolX(ctx context.Context) bool { + v, err := atgb.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (atgb *AuthTokensGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range atgb.fields { + if !authtokens.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := atgb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := atgb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (atgb *AuthTokensGroupBy) sqlQuery() *sql.Selector { + selector := atgb.sql.Select() + aggregation := make([]string, 0, len(atgb.fns)) + for _, fn := range atgb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. 
+ if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(atgb.fields)+len(atgb.fns)) + for _, f := range atgb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(atgb.fields...)...) +} + +// AuthTokensSelect is the builder for selecting fields of AuthTokens entities. +type AuthTokensSelect struct { + *AuthTokensQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. +func (ats *AuthTokensSelect) Scan(ctx context.Context, v interface{}) error { + if err := ats.prepareQuery(ctx); err != nil { + return err + } + ats.sql = ats.AuthTokensQuery.sqlQuery(ctx) + return ats.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (ats *AuthTokensSelect) ScanX(ctx context.Context, v interface{}) { + if err := ats.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Strings(ctx context.Context) ([]string, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Strings is not achievable when selecting more than 1 field") + } + var v []string + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (ats *AuthTokensSelect) StringsX(ctx context.Context) []string { + v, err := ats.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a selector. It is only allowed when selecting one field. 
+func (ats *AuthTokensSelect) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = ats.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (ats *AuthTokensSelect) StringX(ctx context.Context) string { + v, err := ats.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Ints(ctx context.Context) ([]int, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Ints is not achievable when selecting more than 1 field") + } + var v []int + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (ats *AuthTokensSelect) IntsX(ctx context.Context) []int { + v, err := ats.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = ats.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (ats *AuthTokensSelect) IntX(ctx context.Context) int { + v, err := ats.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. 
+func (ats *AuthTokensSelect) Float64s(ctx context.Context) ([]float64, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Float64s is not achievable when selecting more than 1 field") + } + var v []float64 + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (ats *AuthTokensSelect) Float64sX(ctx context.Context) []float64 { + v, err := ats.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = ats.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (ats *AuthTokensSelect) Float64X(ctx context.Context) float64 { + v, err := ats.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Bools(ctx context.Context) ([]bool, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Bools is not achievable when selecting more than 1 field") + } + var v []bool + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. +func (ats *AuthTokensSelect) BoolsX(ctx context.Context) []bool { + v, err := ats.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a selector. It is only allowed when selecting one field. 
+func (ats *AuthTokensSelect) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = ats.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (ats *AuthTokensSelect) BoolX(ctx context.Context) bool { + v, err := ats.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (ats *AuthTokensSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := ats.sql.Query() + if err := ats.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/authtokens_update.go b/backend/ent/authtokens_update.go new file mode 100644 index 0000000..243db3f --- /dev/null +++ b/backend/ent/authtokens_update.go @@ -0,0 +1,472 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokensUpdate is the builder for updating AuthTokens entities. +type AuthTokensUpdate struct { + config + hooks []Hook + mutation *AuthTokensMutation +} + +// Where appends a list predicates to the AuthTokensUpdate builder. +func (atu *AuthTokensUpdate) Where(ps ...predicate.AuthTokens) *AuthTokensUpdate { + atu.mutation.Where(ps...) + return atu +} + +// SetToken sets the "token" field. 
+func (atu *AuthTokensUpdate) SetToken(b []byte) *AuthTokensUpdate { + atu.mutation.SetToken(b) + return atu +} + +// SetExpiresAt sets the "expires_at" field. +func (atu *AuthTokensUpdate) SetExpiresAt(t time.Time) *AuthTokensUpdate { + atu.mutation.SetExpiresAt(t) + return atu +} + +// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil. +func (atu *AuthTokensUpdate) SetNillableExpiresAt(t *time.Time) *AuthTokensUpdate { + if t != nil { + atu.SetExpiresAt(*t) + } + return atu +} + +// SetCreatedAt sets the "created_at" field. +func (atu *AuthTokensUpdate) SetCreatedAt(t time.Time) *AuthTokensUpdate { + atu.mutation.SetCreatedAt(t) + return atu +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atu *AuthTokensUpdate) SetNillableCreatedAt(t *time.Time) *AuthTokensUpdate { + if t != nil { + atu.SetCreatedAt(*t) + } + return atu +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (atu *AuthTokensUpdate) SetUserID(id uuid.UUID) *AuthTokensUpdate { + atu.mutation.SetUserID(id) + return atu +} + +// SetNillableUserID sets the "user" edge to the User entity by ID if the given value is not nil. +func (atu *AuthTokensUpdate) SetNillableUserID(id *uuid.UUID) *AuthTokensUpdate { + if id != nil { + atu = atu.SetUserID(*id) + } + return atu +} + +// SetUser sets the "user" edge to the User entity. +func (atu *AuthTokensUpdate) SetUser(u *User) *AuthTokensUpdate { + return atu.SetUserID(u.ID) +} + +// Mutation returns the AuthTokensMutation object of the builder. +func (atu *AuthTokensUpdate) Mutation() *AuthTokensMutation { + return atu.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (atu *AuthTokensUpdate) ClearUser() *AuthTokensUpdate { + atu.mutation.ClearUser() + return atu +} + +// Save executes the query and returns the number of nodes affected by the update operation. 
+func (atu *AuthTokensUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(atu.hooks) == 0 { + affected, err = atu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + atu.mutation = mutation + affected, err = atu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(atu.hooks) - 1; i >= 0; i-- { + if atu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (atu *AuthTokensUpdate) SaveX(ctx context.Context) int { + affected, err := atu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (atu *AuthTokensUpdate) Exec(ctx context.Context) error { + _, err := atu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (atu *AuthTokensUpdate) ExecX(ctx context.Context) { + if err := atu.Exec(ctx); err != nil { + panic(err) + } +} + +func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + Columns: authtokens.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + if ps := atu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := atu.mutation.Token(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBytes, + Value: value, + Column: authtokens.FieldToken, + }) + } + if value, ok := atu.mutation.ExpiresAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldExpiresAt, + }) + } + if value, ok := atu.mutation.CreatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + } + if atu.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := atu.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = 
append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, atu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{authtokens.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return 0, err + } + return n, nil +} + +// AuthTokensUpdateOne is the builder for updating a single AuthTokens entity. +type AuthTokensUpdateOne struct { + config + fields []string + hooks []Hook + mutation *AuthTokensMutation +} + +// SetToken sets the "token" field. +func (atuo *AuthTokensUpdateOne) SetToken(b []byte) *AuthTokensUpdateOne { + atuo.mutation.SetToken(b) + return atuo +} + +// SetExpiresAt sets the "expires_at" field. +func (atuo *AuthTokensUpdateOne) SetExpiresAt(t time.Time) *AuthTokensUpdateOne { + atuo.mutation.SetExpiresAt(t) + return atuo +} + +// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil. +func (atuo *AuthTokensUpdateOne) SetNillableExpiresAt(t *time.Time) *AuthTokensUpdateOne { + if t != nil { + atuo.SetExpiresAt(*t) + } + return atuo +} + +// SetCreatedAt sets the "created_at" field. +func (atuo *AuthTokensUpdateOne) SetCreatedAt(t time.Time) *AuthTokensUpdateOne { + atuo.mutation.SetCreatedAt(t) + return atuo +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atuo *AuthTokensUpdateOne) SetNillableCreatedAt(t *time.Time) *AuthTokensUpdateOne { + if t != nil { + atuo.SetCreatedAt(*t) + } + return atuo +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (atuo *AuthTokensUpdateOne) SetUserID(id uuid.UUID) *AuthTokensUpdateOne { + atuo.mutation.SetUserID(id) + return atuo +} + +// SetNillableUserID sets the "user" edge to the User entity by ID if the given value is not nil. 
+func (atuo *AuthTokensUpdateOne) SetNillableUserID(id *uuid.UUID) *AuthTokensUpdateOne { + if id != nil { + atuo = atuo.SetUserID(*id) + } + return atuo +} + +// SetUser sets the "user" edge to the User entity. +func (atuo *AuthTokensUpdateOne) SetUser(u *User) *AuthTokensUpdateOne { + return atuo.SetUserID(u.ID) +} + +// Mutation returns the AuthTokensMutation object of the builder. +func (atuo *AuthTokensUpdateOne) Mutation() *AuthTokensMutation { + return atuo.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (atuo *AuthTokensUpdateOne) ClearUser() *AuthTokensUpdateOne { + atuo.mutation.ClearUser() + return atuo +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (atuo *AuthTokensUpdateOne) Select(field string, fields ...string) *AuthTokensUpdateOne { + atuo.fields = append([]string{field}, fields...) + return atuo +} + +// Save executes the query and returns the updated AuthTokens entity. +func (atuo *AuthTokensUpdateOne) Save(ctx context.Context) (*AuthTokens, error) { + var ( + err error + node *AuthTokens + ) + if len(atuo.hooks) == 0 { + node, err = atuo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + atuo.mutation = mutation + node, err = atuo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(atuo.hooks) - 1; i >= 0; i-- { + if atuo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atuo.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atuo.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. 
+func (atuo *AuthTokensUpdateOne) SaveX(ctx context.Context) *AuthTokens { + node, err := atuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (atuo *AuthTokensUpdateOne) Exec(ctx context.Context) error { + _, err := atuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atuo *AuthTokensUpdateOne) ExecX(ctx context.Context) { + if err := atuo.Exec(ctx); err != nil { + panic(err) + } +} + +func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + Columns: authtokens.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + id, ok := atuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "AuthTokens.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := atuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, authtokens.FieldID) + for _, f := range fields { + if !authtokens.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != authtokens.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := atuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := atuo.mutation.Token(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBytes, + Value: value, + Column: authtokens.FieldToken, + }) + } + if value, ok := atuo.mutation.ExpiresAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldExpiresAt, + }) + } + if value, ok := 
atuo.mutation.CreatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + } + if atuo.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := atuo.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &AuthTokens{config: atuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, atuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{authtokens.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/ent/client.go b/backend/ent/client.go new file mode 100644 index 0000000..c6cf533 --- /dev/null +++ b/backend/ent/client.go @@ -0,0 +1,344 @@ +// Code generated by entc, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "fmt" + "log" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/migrate" + + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" + + "entgo.io/ent/dialect" + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" +) + +// Client is the client that holds all ent builders. +type Client struct { + config + // Schema is the client for creating, migrating and dropping schema. + Schema *migrate.Schema + // AuthTokens is the client for interacting with the AuthTokens builders. + AuthTokens *AuthTokensClient + // User is the client for interacting with the User builders. + User *UserClient +} + +// NewClient creates a new client configured with the given options. +func NewClient(opts ...Option) *Client { + cfg := config{log: log.Println, hooks: &hooks{}} + cfg.options(opts...) + client := &Client{config: cfg} + client.init() + return client +} + +func (c *Client) init() { + c.Schema = migrate.NewSchema(c.driver) + c.AuthTokens = NewAuthTokensClient(c.config) + c.User = NewUserClient(c.config) +} + +// Open opens a database/sql.DB specified by the driver name and +// the data source name, and returns a new client attached to it. +// Optional parameters can be added for configuring the client. +func Open(driverName, dataSourceName string, options ...Option) (*Client, error) { + switch driverName { + case dialect.MySQL, dialect.Postgres, dialect.SQLite: + drv, err := sql.Open(driverName, dataSourceName) + if err != nil { + return nil, err + } + return NewClient(append(options, Driver(drv))...), nil + default: + return nil, fmt.Errorf("unsupported driver: %q", driverName) + } +} + +// Tx returns a new transactional client. The provided context +// is used until the transaction is committed or rolled back. 
+func (c *Client) Tx(ctx context.Context) (*Tx, error) { + if _, ok := c.driver.(*txDriver); ok { + return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") + } + tx, err := newTx(ctx, c.driver) + if err != nil { + return nil, fmt.Errorf("ent: starting a transaction: %w", err) + } + cfg := c.config + cfg.driver = tx + return &Tx{ + ctx: ctx, + config: cfg, + AuthTokens: NewAuthTokensClient(cfg), + User: NewUserClient(cfg), + }, nil +} + +// BeginTx returns a transactional client with specified options. +func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) { + if _, ok := c.driver.(*txDriver); ok { + return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") + } + tx, err := c.driver.(interface { + BeginTx(context.Context, *sql.TxOptions) (dialect.Tx, error) + }).BeginTx(ctx, opts) + if err != nil { + return nil, fmt.Errorf("ent: starting a transaction: %w", err) + } + cfg := c.config + cfg.driver = &txDriver{tx: tx, drv: c.driver} + return &Tx{ + ctx: ctx, + config: cfg, + AuthTokens: NewAuthTokensClient(cfg), + User: NewUserClient(cfg), + }, nil +} + +// Debug returns a new debug-client. It's used to get verbose logging on specific operations. +// +// client.Debug(). +// AuthTokens. +// Query(). +// Count(ctx) +// +func (c *Client) Debug() *Client { + if c.debug { + return c + } + cfg := c.config + cfg.driver = dialect.Debug(c.driver, c.log) + client := &Client{config: cfg} + client.init() + return client +} + +// Close closes the database connection and prevents new queries from starting. +func (c *Client) Close() error { + return c.driver.Close() +} + +// Use adds the mutation hooks to all the entity clients. +// In order to add hooks to a specific client, call: `client.Node.Use(...)`. +func (c *Client) Use(hooks ...Hook) { + c.AuthTokens.Use(hooks...) + c.User.Use(hooks...) +} + +// AuthTokensClient is a client for the AuthTokens schema. 
type AuthTokensClient struct {
	// config carries the shared driver, hook registry, and debug settings.
	config
}

// NewAuthTokensClient returns a client for the AuthTokens from the given config.
func NewAuthTokensClient(c config) *AuthTokensClient {
	return &AuthTokensClient{config: c}
}

// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `authtokens.Hooks(f(g(h())))`.
func (c *AuthTokensClient) Use(hooks ...Hook) {
	c.hooks.AuthTokens = append(c.hooks.AuthTokens, hooks...)
}

// Create returns a create builder for AuthTokens.
func (c *AuthTokensClient) Create() *AuthTokensCreate {
	mutation := newAuthTokensMutation(c.config, OpCreate)
	// The hook stack is snapshotted into the builder at construction time.
	return &AuthTokensCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// CreateBulk returns a builder for creating a bulk of AuthTokens entities.
func (c *AuthTokensClient) CreateBulk(builders ...*AuthTokensCreate) *AuthTokensCreateBulk {
	return &AuthTokensCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for AuthTokens.
func (c *AuthTokensClient) Update() *AuthTokensUpdate {
	mutation := newAuthTokensMutation(c.config, OpUpdate)
	return &AuthTokensUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOne returns an update builder for the given entity.
func (c *AuthTokensClient) UpdateOne(at *AuthTokens) *AuthTokensUpdateOne {
	// withAuthTokens seeds the mutation's old-value loader from the entity
	// already in hand, avoiding a refetch.
	mutation := newAuthTokensMutation(c.config, OpUpdateOne, withAuthTokens(at))
	return &AuthTokensUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOneID returns an update builder for the given id.
func (c *AuthTokensClient) UpdateOneID(id int) *AuthTokensUpdateOne {
	// withAuthTokensID defers loading the old values until first requested.
	mutation := newAuthTokensMutation(c.config, OpUpdateOne, withAuthTokensID(id))
	return &AuthTokensUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// Delete returns a delete builder for AuthTokens.
func (c *AuthTokensClient) Delete() *AuthTokensDelete {
	mutation := newAuthTokensMutation(c.config, OpDelete)
	return &AuthTokensDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// DeleteOne returns a delete builder for the given entity.
func (c *AuthTokensClient) DeleteOne(at *AuthTokens) *AuthTokensDeleteOne {
	return c.DeleteOneID(at.ID)
}

// DeleteOneID returns a delete builder for the given id.
func (c *AuthTokensClient) DeleteOneID(id int) *AuthTokensDeleteOne {
	builder := c.Delete().Where(authtokens.ID(id))
	// Pin the mutation to this exact id and retag the op as a single-row delete.
	builder.mutation.id = &id
	builder.mutation.op = OpDeleteOne
	return &AuthTokensDeleteOne{builder}
}

// Query returns a query builder for AuthTokens.
func (c *AuthTokensClient) Query() *AuthTokensQuery {
	return &AuthTokensQuery{
		config: c.config,
	}
}

// Get returns a AuthTokens entity by its id.
func (c *AuthTokensClient) Get(ctx context.Context, id int) (*AuthTokens, error) {
	return c.Query().Where(authtokens.ID(id)).Only(ctx)
}

// GetX is like Get, but panics if an error occurs.
func (c *AuthTokensClient) GetX(ctx context.Context, id int) *AuthTokens {
	obj, err := c.Get(ctx, id)
	if err != nil {
		panic(err)
	}
	return obj
}

// QueryUser queries the user edge of a AuthTokens.
func (c *AuthTokensClient) QueryUser(at *AuthTokens) *UserQuery {
	query := &UserQuery{config: c.config}
	// The traversal path is resolved lazily when the query executes.
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := at.ID
		// M2O edge (inverse side): auth_tokens -> users via user_auth_tokens.
		step := sqlgraph.NewStep(
			sqlgraph.From(authtokens.Table, authtokens.FieldID, id),
			sqlgraph.To(user.Table, user.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, authtokens.UserTable, authtokens.UserColumn),
		)
		fromV = sqlgraph.Neighbors(at.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *AuthTokensClient) Hooks() []Hook {
	return c.hooks.AuthTokens
}

// UserClient is a client for the User schema.
type UserClient struct {
	// config carries the shared driver, hook registry, and debug settings.
	config
}

// NewUserClient returns a client for the User from the given config.
func NewUserClient(c config) *UserClient {
	return &UserClient{config: c}
}

// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `user.Hooks(f(g(h())))`.
func (c *UserClient) Use(hooks ...Hook) {
	c.hooks.User = append(c.hooks.User, hooks...)
}

// Create returns a create builder for User.
func (c *UserClient) Create() *UserCreate {
	mutation := newUserMutation(c.config, OpCreate)
	// The hook stack is snapshotted into the builder at construction time.
	return &UserCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// CreateBulk returns a builder for creating a bulk of User entities.
func (c *UserClient) CreateBulk(builders ...*UserCreate) *UserCreateBulk {
	return &UserCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for User.
func (c *UserClient) Update() *UserUpdate {
	mutation := newUserMutation(c.config, OpUpdate)
	return &UserUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOne returns an update builder for the given entity.
func (c *UserClient) UpdateOne(u *User) *UserUpdateOne {
	// withUser seeds the mutation's old-value loader from the entity in hand.
	mutation := newUserMutation(c.config, OpUpdateOne, withUser(u))
	return &UserUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOneID returns an update builder for the given id.
// Note: User ids are UUIDs (see the users table schema).
func (c *UserClient) UpdateOneID(id uuid.UUID) *UserUpdateOne {
	mutation := newUserMutation(c.config, OpUpdateOne, withUserID(id))
	return &UserUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// Delete returns a delete builder for User.
func (c *UserClient) Delete() *UserDelete {
	mutation := newUserMutation(c.config, OpDelete)
	return &UserDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// DeleteOne returns a delete builder for the given entity.
+func (c *UserClient) DeleteOne(u *User) *UserDeleteOne { + return c.DeleteOneID(u.ID) +} + +// DeleteOneID returns a delete builder for the given id. +func (c *UserClient) DeleteOneID(id uuid.UUID) *UserDeleteOne { + builder := c.Delete().Where(user.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &UserDeleteOne{builder} +} + +// Query returns a query builder for User. +func (c *UserClient) Query() *UserQuery { + return &UserQuery{ + config: c.config, + } +} + +// Get returns a User entity by its id. +func (c *UserClient) Get(ctx context.Context, id uuid.UUID) (*User, error) { + return c.Query().Where(user.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *UserClient) GetX(ctx context.Context, id uuid.UUID) *User { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryAuthTokens queries the auth_tokens edge of a User. +func (c *UserClient) QueryAuthTokens(u *User) *AuthTokensQuery { + query := &AuthTokensQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := u.ID + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, id), + sqlgraph.To(authtokens.Table, authtokens.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.AuthTokensTable, user.AuthTokensColumn), + ) + fromV = sqlgraph.Neighbors(u.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *UserClient) Hooks() []Hook { + return c.hooks.User +} diff --git a/backend/ent/config.go b/backend/ent/config.go new file mode 100644 index 0000000..550e16e --- /dev/null +++ b/backend/ent/config.go @@ -0,0 +1,60 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "entgo.io/ent" + "entgo.io/ent/dialect" +) + +// Option function to configure the client. +type Option func(*config) + +// Config is the configuration for the client and its builder. 
+type config struct { + // driver used for executing database requests. + driver dialect.Driver + // debug enable a debug logging. + debug bool + // log used for logging on debug mode. + log func(...interface{}) + // hooks to execute on mutations. + hooks *hooks +} + +// hooks per client, for fast access. +type hooks struct { + AuthTokens []ent.Hook + User []ent.Hook +} + +// Options applies the options on the config object. +func (c *config) options(opts ...Option) { + for _, opt := range opts { + opt(c) + } + if c.debug { + c.driver = dialect.Debug(c.driver, c.log) + } +} + +// Debug enables debug logging on the ent.Driver. +func Debug() Option { + return func(c *config) { + c.debug = true + } +} + +// Log sets the logging function for debug mode. +func Log(fn func(...interface{})) Option { + return func(c *config) { + c.log = fn + } +} + +// Driver configures the client driver. +func Driver(driver dialect.Driver) Option { + return func(c *config) { + c.driver = driver + } +} diff --git a/backend/ent/context.go b/backend/ent/context.go new file mode 100644 index 0000000..0840726 --- /dev/null +++ b/backend/ent/context.go @@ -0,0 +1,33 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" +) + +type clientCtxKey struct{} + +// FromContext returns a Client stored inside a context, or nil if there isn't one. +func FromContext(ctx context.Context) *Client { + c, _ := ctx.Value(clientCtxKey{}).(*Client) + return c +} + +// NewContext returns a new context with the given Client attached. +func NewContext(parent context.Context, c *Client) context.Context { + return context.WithValue(parent, clientCtxKey{}, c) +} + +type txCtxKey struct{} + +// TxFromContext returns a Tx stored inside a context, or nil if there isn't one. +func TxFromContext(ctx context.Context) *Tx { + tx, _ := ctx.Value(txCtxKey{}).(*Tx) + return tx +} + +// NewTxContext returns a new context with the given Tx attached. 
+func NewTxContext(parent context.Context, tx *Tx) context.Context { + return context.WithValue(parent, txCtxKey{}, tx) +} diff --git a/backend/ent/ent.go b/backend/ent/ent.go new file mode 100644 index 0000000..2568a4e --- /dev/null +++ b/backend/ent/ent.go @@ -0,0 +1,261 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "errors" + "fmt" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// ent aliases to avoid import conflicts in user's code. +type ( + Op = ent.Op + Hook = ent.Hook + Value = ent.Value + Query = ent.Query + Policy = ent.Policy + Mutator = ent.Mutator + Mutation = ent.Mutation + MutateFunc = ent.MutateFunc +) + +// OrderFunc applies an ordering on the sql selector. +type OrderFunc func(*sql.Selector) + +// columnChecker returns a function indicates if the column exists in the given column. +func columnChecker(table string) func(string) error { + checks := map[string]func(string) bool{ + authtokens.Table: authtokens.ValidColumn, + user.Table: user.ValidColumn, + } + check, ok := checks[table] + if !ok { + return func(string) error { + return fmt.Errorf("unknown table %q", table) + } + } + return func(column string) error { + if !check(column) { + return fmt.Errorf("unknown column %q for table %q", column, table) + } + return nil + } +} + +// Asc applies the given fields in ASC order. +func Asc(fields ...string) OrderFunc { + return func(s *sql.Selector) { + check := columnChecker(s.TableName()) + for _, f := range fields { + if err := check(f); err != nil { + s.AddError(&ValidationError{Name: f, err: fmt.Errorf("ent: %w", err)}) + } + s.OrderBy(sql.Asc(s.C(f))) + } + } +} + +// Desc applies the given fields in DESC order. 
+func Desc(fields ...string) OrderFunc { + return func(s *sql.Selector) { + check := columnChecker(s.TableName()) + for _, f := range fields { + if err := check(f); err != nil { + s.AddError(&ValidationError{Name: f, err: fmt.Errorf("ent: %w", err)}) + } + s.OrderBy(sql.Desc(s.C(f))) + } + } +} + +// AggregateFunc applies an aggregation step on the group-by traversal/selector. +type AggregateFunc func(*sql.Selector) string + +// As is a pseudo aggregation function for renaming another other functions with custom names. For example: +// +// GroupBy(field1, field2). +// Aggregate(ent.As(ent.Sum(field1), "sum_field1"), (ent.As(ent.Sum(field2), "sum_field2")). +// Scan(ctx, &v) +// +func As(fn AggregateFunc, end string) AggregateFunc { + return func(s *sql.Selector) string { + return sql.As(fn(s), end) + } +} + +// Count applies the "count" aggregation function on each group. +func Count() AggregateFunc { + return func(s *sql.Selector) string { + return sql.Count("*") + } +} + +// Max applies the "max" aggregation function on the given field of each group. +func Max(field string) AggregateFunc { + return func(s *sql.Selector) string { + check := columnChecker(s.TableName()) + if err := check(field); err != nil { + s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) + return "" + } + return sql.Max(s.C(field)) + } +} + +// Mean applies the "mean" aggregation function on the given field of each group. +func Mean(field string) AggregateFunc { + return func(s *sql.Selector) string { + check := columnChecker(s.TableName()) + if err := check(field); err != nil { + s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) + return "" + } + return sql.Avg(s.C(field)) + } +} + +// Min applies the "min" aggregation function on the given field of each group. 
+func Min(field string) AggregateFunc { + return func(s *sql.Selector) string { + check := columnChecker(s.TableName()) + if err := check(field); err != nil { + s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) + return "" + } + return sql.Min(s.C(field)) + } +} + +// Sum applies the "sum" aggregation function on the given field of each group. +func Sum(field string) AggregateFunc { + return func(s *sql.Selector) string { + check := columnChecker(s.TableName()) + if err := check(field); err != nil { + s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)}) + return "" + } + return sql.Sum(s.C(field)) + } +} + +// ValidationError returns when validating a field or edge fails. +type ValidationError struct { + Name string // Field or edge name. + err error +} + +// Error implements the error interface. +func (e *ValidationError) Error() string { + return e.err.Error() +} + +// Unwrap implements the errors.Wrapper interface. +func (e *ValidationError) Unwrap() error { + return e.err +} + +// IsValidationError returns a boolean indicating whether the error is a validation error. +func IsValidationError(err error) bool { + if err == nil { + return false + } + var e *ValidationError + return errors.As(err, &e) +} + +// NotFoundError returns when trying to fetch a specific entity and it was not found in the database. +type NotFoundError struct { + label string +} + +// Error implements the error interface. +func (e *NotFoundError) Error() string { + return "ent: " + e.label + " not found" +} + +// IsNotFound returns a boolean indicating whether the error is a not found error. +func IsNotFound(err error) bool { + if err == nil { + return false + } + var e *NotFoundError + return errors.As(err, &e) +} + +// MaskNotFound masks not found error. 
+func MaskNotFound(err error) error { + if IsNotFound(err) { + return nil + } + return err +} + +// NotSingularError returns when trying to fetch a singular entity and more then one was found in the database. +type NotSingularError struct { + label string +} + +// Error implements the error interface. +func (e *NotSingularError) Error() string { + return "ent: " + e.label + " not singular" +} + +// IsNotSingular returns a boolean indicating whether the error is a not singular error. +func IsNotSingular(err error) bool { + if err == nil { + return false + } + var e *NotSingularError + return errors.As(err, &e) +} + +// NotLoadedError returns when trying to get a node that was not loaded by the query. +type NotLoadedError struct { + edge string +} + +// Error implements the error interface. +func (e *NotLoadedError) Error() string { + return "ent: " + e.edge + " edge was not loaded" +} + +// IsNotLoaded returns a boolean indicating whether the error is a not loaded error. +func IsNotLoaded(err error) bool { + if err == nil { + return false + } + var e *NotLoadedError + return errors.As(err, &e) +} + +// ConstraintError returns when trying to create/update one or more entities and +// one or more of their constraints failed. For example, violation of edge or +// field uniqueness. +type ConstraintError struct { + msg string + wrap error +} + +// Error implements the error interface. +func (e ConstraintError) Error() string { + return "ent: constraint failed: " + e.msg +} + +// Unwrap implements the errors.Wrapper interface. +func (e *ConstraintError) Unwrap() error { + return e.wrap +} + +// IsConstraintError returns a boolean indicating whether the error is a constraint failure. 
+func IsConstraintError(err error) bool { + if err == nil { + return false + } + var e *ConstraintError + return errors.As(err, &e) +} diff --git a/backend/ent/enttest/enttest.go b/backend/ent/enttest/enttest.go new file mode 100644 index 0000000..cc6930e --- /dev/null +++ b/backend/ent/enttest/enttest.go @@ -0,0 +1,78 @@ +// Code generated by entc, DO NOT EDIT. + +package enttest + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/ent" + // required by schema hooks. + _ "github.com/hay-kot/git-web-template/backend/ent/runtime" + + "entgo.io/ent/dialect/sql/schema" +) + +type ( + // TestingT is the interface that is shared between + // testing.T and testing.B and used by enttest. + TestingT interface { + FailNow() + Error(...interface{}) + } + + // Option configures client creation. + Option func(*options) + + options struct { + opts []ent.Option + migrateOpts []schema.MigrateOption + } +) + +// WithOptions forwards options to client creation. +func WithOptions(opts ...ent.Option) Option { + return func(o *options) { + o.opts = append(o.opts, opts...) + } +} + +// WithMigrateOptions forwards options to auto migration. +func WithMigrateOptions(opts ...schema.MigrateOption) Option { + return func(o *options) { + o.migrateOpts = append(o.migrateOpts, opts...) + } +} + +func newOptions(opts []Option) *options { + o := &options{} + for _, opt := range opts { + opt(o) + } + return o +} + +// Open calls ent.Open and auto-run migration. +func Open(t TestingT, driverName, dataSourceName string, opts ...Option) *ent.Client { + o := newOptions(opts) + c, err := ent.Open(driverName, dataSourceName, o.opts...) + if err != nil { + t.Error(err) + t.FailNow() + } + if err := c.Schema.Create(context.Background(), o.migrateOpts...); err != nil { + t.Error(err) + t.FailNow() + } + return c +} + +// NewClient calls ent.NewClient and auto-run migration. +func NewClient(t TestingT, opts ...Option) *ent.Client { + o := newOptions(opts) + c := ent.NewClient(o.opts...) 
+ if err := c.Schema.Create(context.Background(), o.migrateOpts...); err != nil { + t.Error(err) + t.FailNow() + } + return c +} diff --git a/backend/ent/generate.go b/backend/ent/generate.go new file mode 100644 index 0000000..8d3fdfd --- /dev/null +++ b/backend/ent/generate.go @@ -0,0 +1,3 @@ +package ent + +//go:generate go run -mod=mod entgo.io/ent/cmd/ent generate ./schema diff --git a/backend/ent/hook/hook.go b/backend/ent/hook/hook.go new file mode 100644 index 0000000..1eefec3 --- /dev/null +++ b/backend/ent/hook/hook.go @@ -0,0 +1,217 @@ +// Code generated by entc, DO NOT EDIT. + +package hook + +import ( + "context" + "fmt" + + "github.com/hay-kot/git-web-template/backend/ent" +) + +// The AuthTokensFunc type is an adapter to allow the use of ordinary +// function as AuthTokens mutator. +type AuthTokensFunc func(context.Context, *ent.AuthTokensMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f AuthTokensFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.AuthTokensMutation", m) + } + return f(ctx, mv) +} + +// The UserFunc type is an adapter to allow the use of ordinary +// function as User mutator. +type UserFunc func(context.Context, *ent.UserMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f UserFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.UserMutation", m) + } + return f(ctx, mv) +} + +// Condition is a hook condition function. +type Condition func(context.Context, ent.Mutation) bool + +// And groups conditions with the AND operator. 
+func And(first, second Condition, rest ...Condition) Condition { + return func(ctx context.Context, m ent.Mutation) bool { + if !first(ctx, m) || !second(ctx, m) { + return false + } + for _, cond := range rest { + if !cond(ctx, m) { + return false + } + } + return true + } +} + +// Or groups conditions with the OR operator. +func Or(first, second Condition, rest ...Condition) Condition { + return func(ctx context.Context, m ent.Mutation) bool { + if first(ctx, m) || second(ctx, m) { + return true + } + for _, cond := range rest { + if cond(ctx, m) { + return true + } + } + return false + } +} + +// Not negates a given condition. +func Not(cond Condition) Condition { + return func(ctx context.Context, m ent.Mutation) bool { + return !cond(ctx, m) + } +} + +// HasOp is a condition testing mutation operation. +func HasOp(op ent.Op) Condition { + return func(_ context.Context, m ent.Mutation) bool { + return m.Op().Is(op) + } +} + +// HasAddedFields is a condition validating `.AddedField` on fields. +func HasAddedFields(field string, fields ...string) Condition { + return func(_ context.Context, m ent.Mutation) bool { + if _, exists := m.AddedField(field); !exists { + return false + } + for _, field := range fields { + if _, exists := m.AddedField(field); !exists { + return false + } + } + return true + } +} + +// HasClearedFields is a condition validating `.FieldCleared` on fields. +func HasClearedFields(field string, fields ...string) Condition { + return func(_ context.Context, m ent.Mutation) bool { + if exists := m.FieldCleared(field); !exists { + return false + } + for _, field := range fields { + if exists := m.FieldCleared(field); !exists { + return false + } + } + return true + } +} + +// HasFields is a condition validating `.Field` on fields. 
+func HasFields(field string, fields ...string) Condition { + return func(_ context.Context, m ent.Mutation) bool { + if _, exists := m.Field(field); !exists { + return false + } + for _, field := range fields { + if _, exists := m.Field(field); !exists { + return false + } + } + return true + } +} + +// If executes the given hook under condition. +// +// hook.If(ComputeAverage, And(HasFields(...), HasAddedFields(...))) +// +func If(hk ent.Hook, cond Condition) ent.Hook { + return func(next ent.Mutator) ent.Mutator { + return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) { + if cond(ctx, m) { + return hk(next).Mutate(ctx, m) + } + return next.Mutate(ctx, m) + }) + } +} + +// On executes the given hook only for the given operation. +// +// hook.On(Log, ent.Delete|ent.Create) +// +func On(hk ent.Hook, op ent.Op) ent.Hook { + return If(hk, HasOp(op)) +} + +// Unless skips the given hook only for the given operation. +// +// hook.Unless(Log, ent.Update|ent.UpdateOne) +// +func Unless(hk ent.Hook, op ent.Op) ent.Hook { + return If(hk, Not(HasOp(op))) +} + +// FixedError is a hook returning a fixed error. +func FixedError(err error) ent.Hook { + return func(ent.Mutator) ent.Mutator { + return ent.MutateFunc(func(context.Context, ent.Mutation) (ent.Value, error) { + return nil, err + }) + } +} + +// Reject returns a hook that rejects all operations that match op. +// +// func (T) Hooks() []ent.Hook { +// return []ent.Hook{ +// Reject(ent.Delete|ent.Update), +// } +// } +// +func Reject(op ent.Op) ent.Hook { + hk := FixedError(fmt.Errorf("%s operation is not allowed", op)) + return On(hk, op) +} + +// Chain acts as a list of hooks and is effectively immutable. +// Once created, it will always hold the same set of hooks in the same order. +type Chain struct { + hooks []ent.Hook +} + +// NewChain creates a new chain of hooks. 
+func NewChain(hooks ...ent.Hook) Chain { + return Chain{append([]ent.Hook(nil), hooks...)} +} + +// Hook chains the list of hooks and returns the final hook. +func (c Chain) Hook() ent.Hook { + return func(mutator ent.Mutator) ent.Mutator { + for i := len(c.hooks) - 1; i >= 0; i-- { + mutator = c.hooks[i](mutator) + } + return mutator + } +} + +// Append extends a chain, adding the specified hook +// as the last ones in the mutation flow. +func (c Chain) Append(hooks ...ent.Hook) Chain { + newHooks := make([]ent.Hook, 0, len(c.hooks)+len(hooks)) + newHooks = append(newHooks, c.hooks...) + newHooks = append(newHooks, hooks...) + return Chain{newHooks} +} + +// Extend extends a chain, adding the specified chain +// as the last ones in the mutation flow. +func (c Chain) Extend(chain Chain) Chain { + return c.Append(chain.hooks...) +} diff --git a/backend/ent/migrate/migrate.go b/backend/ent/migrate/migrate.go new file mode 100644 index 0000000..9bdaf52 --- /dev/null +++ b/backend/ent/migrate/migrate.go @@ -0,0 +1,71 @@ +// Code generated by entc, DO NOT EDIT. + +package migrate + +import ( + "context" + "fmt" + "io" + + "entgo.io/ent/dialect" + "entgo.io/ent/dialect/sql/schema" +) + +var ( + // WithGlobalUniqueID sets the universal ids options to the migration. + // If this option is enabled, ent migration will allocate a 1<<32 range + // for the ids of each entity (table). + // Note that this option cannot be applied on tables that already exist. + WithGlobalUniqueID = schema.WithGlobalUniqueID + // WithDropColumn sets the drop column option to the migration. + // If this option is enabled, ent migration will drop old columns + // that were used for both fields and edges. This defaults to false. + WithDropColumn = schema.WithDropColumn + // WithDropIndex sets the drop index option to the migration. + // If this option is enabled, ent migration will drop old indexes + // that were defined in the schema. This defaults to false. 
+ // Note that unique constraints are defined using `UNIQUE INDEX`, + // and therefore, it's recommended to enable this option to get more + // flexibility in the schema changes. + WithDropIndex = schema.WithDropIndex + // WithFixture sets the foreign-key renaming option to the migration when upgrading + // ent from v0.1.0 (issue-#285). Defaults to false. + WithFixture = schema.WithFixture + // WithForeignKeys enables creating foreign-key in schema DDL. This defaults to true. + WithForeignKeys = schema.WithForeignKeys +) + +// Schema is the API for creating, migrating and dropping a schema. +type Schema struct { + drv dialect.Driver +} + +// NewSchema creates a new schema client. +func NewSchema(drv dialect.Driver) *Schema { return &Schema{drv: drv} } + +// Create creates all schema resources. +func (s *Schema) Create(ctx context.Context, opts ...schema.MigrateOption) error { + migrate, err := schema.NewMigrate(s.drv, opts...) + if err != nil { + return fmt.Errorf("ent/migrate: %w", err) + } + return migrate.Create(ctx, Tables...) +} + +// WriteTo writes the schema changes to w instead of running them against the database. +// +// if err := client.Schema.WriteTo(context.Background(), os.Stdout); err != nil { +// log.Fatal(err) +// } +// +func (s *Schema) WriteTo(ctx context.Context, w io.Writer, opts ...schema.MigrateOption) error { + drv := &schema.WriteDriver{ + Writer: w, + Driver: s.drv, + } + migrate, err := schema.NewMigrate(drv, opts...) + if err != nil { + return fmt.Errorf("ent/migrate: %w", err) + } + return migrate.Create(ctx, Tables...) +} diff --git a/backend/ent/migrate/schema.go b/backend/ent/migrate/schema.go new file mode 100644 index 0000000..203f01c --- /dev/null +++ b/backend/ent/migrate/schema.go @@ -0,0 +1,63 @@ +// Code generated by entc, DO NOT EDIT. + +package migrate + +import ( + "entgo.io/ent/dialect/sql/schema" + "entgo.io/ent/schema/field" +) + +var ( + // AuthTokensColumns holds the columns for the "auth_tokens" table. 
+ AuthTokensColumns = []*schema.Column{ + {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "token", Type: field.TypeBytes, Unique: true}, + {Name: "expires_at", Type: field.TypeTime}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "user_auth_tokens", Type: field.TypeUUID, Nullable: true}, + } + // AuthTokensTable holds the schema information for the "auth_tokens" table. + AuthTokensTable = &schema.Table{ + Name: "auth_tokens", + Columns: AuthTokensColumns, + PrimaryKey: []*schema.Column{AuthTokensColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "auth_tokens_users_auth_tokens", + Columns: []*schema.Column{AuthTokensColumns[4]}, + RefColumns: []*schema.Column{UsersColumns[0]}, + OnDelete: schema.SetNull, + }, + }, + Indexes: []*schema.Index{ + { + Name: "authtokens_token", + Unique: false, + Columns: []*schema.Column{AuthTokensColumns[1]}, + }, + }, + } + // UsersColumns holds the columns for the "users" table. + UsersColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "name", Type: field.TypeString}, + {Name: "email", Type: field.TypeString, Unique: true}, + {Name: "password", Type: field.TypeString}, + {Name: "is_superuser", Type: field.TypeBool, Default: false}, + } + // UsersTable holds the schema information for the "users" table. + UsersTable = &schema.Table{ + Name: "users", + Columns: UsersColumns, + PrimaryKey: []*schema.Column{UsersColumns[0]}, + } + // Tables holds all the tables in the schema. + Tables = []*schema.Table{ + AuthTokensTable, + UsersTable, + } +) + +func init() { + AuthTokensTable.ForeignKeys[0].RefTable = UsersTable +} diff --git a/backend/ent/mutation.go b/backend/ent/mutation.go new file mode 100644 index 0000000..3705cb4 --- /dev/null +++ b/backend/ent/mutation.go @@ -0,0 +1,1091 @@ +// Code generated by entc, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "errors" + "fmt" + "sync" + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" + + "entgo.io/ent" +) + +const ( + // Operation types. + OpCreate = ent.OpCreate + OpDelete = ent.OpDelete + OpDeleteOne = ent.OpDeleteOne + OpUpdate = ent.OpUpdate + OpUpdateOne = ent.OpUpdateOne + + // Node types. + TypeAuthTokens = "AuthTokens" + TypeUser = "User" +) + +// AuthTokensMutation represents an operation that mutates the AuthTokens nodes in the graph. +type AuthTokensMutation struct { + config + op Op + typ string + id *int + token *[]byte + expires_at *time.Time + created_at *time.Time + clearedFields map[string]struct{} + user *uuid.UUID + cleareduser bool + done bool + oldValue func(context.Context) (*AuthTokens, error) + predicates []predicate.AuthTokens +} + +var _ ent.Mutation = (*AuthTokensMutation)(nil) + +// authtokensOption allows management of the mutation configuration using functional options. +type authtokensOption func(*AuthTokensMutation) + +// newAuthTokensMutation creates new mutation for the AuthTokens entity. +func newAuthTokensMutation(c config, op Op, opts ...authtokensOption) *AuthTokensMutation { + m := &AuthTokensMutation{ + config: c, + op: op, + typ: TypeAuthTokens, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withAuthTokensID sets the ID field of the mutation. 
+func withAuthTokensID(id int) authtokensOption { + return func(m *AuthTokensMutation) { + var ( + err error + once sync.Once + value *AuthTokens + ) + m.oldValue = func(ctx context.Context) (*AuthTokens, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().AuthTokens.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withAuthTokens sets the old AuthTokens of the mutation. +func withAuthTokens(node *AuthTokens) authtokensOption { + return func(m *AuthTokensMutation) { + m.oldValue = func(context.Context) (*AuthTokens, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m AuthTokensMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m AuthTokensMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *AuthTokensMutation) ID() (id int, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. 
+func (m *AuthTokensMutation) IDs(ctx context.Context) ([]int, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []int{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().AuthTokens.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetToken sets the "token" field. +func (m *AuthTokensMutation) SetToken(b []byte) { + m.token = &b +} + +// Token returns the value of the "token" field in the mutation. +func (m *AuthTokensMutation) Token() (r []byte, exists bool) { + v := m.token + if v == nil { + return + } + return *v, true +} + +// OldToken returns the old "token" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *AuthTokensMutation) OldToken(ctx context.Context) (v []byte, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldToken is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldToken requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldToken: %w", err) + } + return oldValue.Token, nil +} + +// ResetToken resets all changes to the "token" field. +func (m *AuthTokensMutation) ResetToken() { + m.token = nil +} + +// SetExpiresAt sets the "expires_at" field. +func (m *AuthTokensMutation) SetExpiresAt(t time.Time) { + m.expires_at = &t +} + +// ExpiresAt returns the value of the "expires_at" field in the mutation. 
+func (m *AuthTokensMutation) ExpiresAt() (r time.Time, exists bool) { + v := m.expires_at + if v == nil { + return + } + return *v, true +} + +// OldExpiresAt returns the old "expires_at" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *AuthTokensMutation) OldExpiresAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldExpiresAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldExpiresAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldExpiresAt: %w", err) + } + return oldValue.ExpiresAt, nil +} + +// ResetExpiresAt resets all changes to the "expires_at" field. +func (m *AuthTokensMutation) ResetExpiresAt() { + m.expires_at = nil +} + +// SetCreatedAt sets the "created_at" field. +func (m *AuthTokensMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *AuthTokensMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *AuthTokensMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *AuthTokensMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUserID sets the "user" edge to the User entity by id. +func (m *AuthTokensMutation) SetUserID(id uuid.UUID) { + m.user = &id +} + +// ClearUser clears the "user" edge to the User entity. +func (m *AuthTokensMutation) ClearUser() { + m.cleareduser = true +} + +// UserCleared reports if the "user" edge to the User entity was cleared. +func (m *AuthTokensMutation) UserCleared() bool { + return m.cleareduser +} + +// UserID returns the "user" edge ID in the mutation. +func (m *AuthTokensMutation) UserID() (id uuid.UUID, exists bool) { + if m.user != nil { + return *m.user, true + } + return +} + +// UserIDs returns the "user" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// UserID instead. It exists only for internal usage by the builders. +func (m *AuthTokensMutation) UserIDs() (ids []uuid.UUID) { + if id := m.user; id != nil { + ids = append(ids, *id) + } + return +} + +// ResetUser resets all changes to the "user" edge. +func (m *AuthTokensMutation) ResetUser() { + m.user = nil + m.cleareduser = false +} + +// Where appends a list predicates to the AuthTokensMutation builder. +func (m *AuthTokensMutation) Where(ps ...predicate.AuthTokens) { + m.predicates = append(m.predicates, ps...) +} + +// Op returns the operation name. 
+func (m *AuthTokensMutation) Op() Op { + return m.op +} + +// Type returns the node type of this mutation (AuthTokens). +func (m *AuthTokensMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). +func (m *AuthTokensMutation) Fields() []string { + fields := make([]string, 0, 3) + if m.token != nil { + fields = append(fields, authtokens.FieldToken) + } + if m.expires_at != nil { + fields = append(fields, authtokens.FieldExpiresAt) + } + if m.created_at != nil { + fields = append(fields, authtokens.FieldCreatedAt) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *AuthTokensMutation) Field(name string) (ent.Value, bool) { + switch name { + case authtokens.FieldToken: + return m.Token() + case authtokens.FieldExpiresAt: + return m.ExpiresAt() + case authtokens.FieldCreatedAt: + return m.CreatedAt() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *AuthTokensMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case authtokens.FieldToken: + return m.OldToken(ctx) + case authtokens.FieldExpiresAt: + return m.OldExpiresAt(ctx) + case authtokens.FieldCreatedAt: + return m.OldCreatedAt(ctx) + } + return nil, fmt.Errorf("unknown AuthTokens field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *AuthTokensMutation) SetField(name string, value ent.Value) error { + switch name { + case authtokens.FieldToken: + v, ok := value.([]byte) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetToken(v) + return nil + case authtokens.FieldExpiresAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetExpiresAt(v) + return nil + case authtokens.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + } + return fmt.Errorf("unknown AuthTokens field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *AuthTokensMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *AuthTokensMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *AuthTokensMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown AuthTokens numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *AuthTokensMutation) ClearedFields() []string { + return nil +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *AuthTokensMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. 
It returns an +// error if the field is not defined in the schema. +func (m *AuthTokensMutation) ClearField(name string) error { + return fmt.Errorf("unknown AuthTokens nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *AuthTokensMutation) ResetField(name string) error { + switch name { + case authtokens.FieldToken: + m.ResetToken() + return nil + case authtokens.FieldExpiresAt: + m.ResetExpiresAt() + return nil + case authtokens.FieldCreatedAt: + m.ResetCreatedAt() + return nil + } + return fmt.Errorf("unknown AuthTokens field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *AuthTokensMutation) AddedEdges() []string { + edges := make([]string, 0, 1) + if m.user != nil { + edges = append(edges, authtokens.EdgeUser) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *AuthTokensMutation) AddedIDs(name string) []ent.Value { + switch name { + case authtokens.EdgeUser: + if id := m.user; id != nil { + return []ent.Value{*id} + } + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *AuthTokensMutation) RemovedEdges() []string { + edges := make([]string, 0, 1) + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *AuthTokensMutation) RemovedIDs(name string) []ent.Value { + switch name { + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. 
+func (m *AuthTokensMutation) ClearedEdges() []string { + edges := make([]string, 0, 1) + if m.cleareduser { + edges = append(edges, authtokens.EdgeUser) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *AuthTokensMutation) EdgeCleared(name string) bool { + switch name { + case authtokens.EdgeUser: + return m.cleareduser + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *AuthTokensMutation) ClearEdge(name string) error { + switch name { + case authtokens.EdgeUser: + m.ClearUser() + return nil + } + return fmt.Errorf("unknown AuthTokens unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *AuthTokensMutation) ResetEdge(name string) error { + switch name { + case authtokens.EdgeUser: + m.ResetUser() + return nil + } + return fmt.Errorf("unknown AuthTokens edge %s", name) +} + +// UserMutation represents an operation that mutates the User nodes in the graph. +type UserMutation struct { + config + op Op + typ string + id *uuid.UUID + name *string + email *string + password *string + is_superuser *bool + clearedFields map[string]struct{} + auth_tokens map[int]struct{} + removedauth_tokens map[int]struct{} + clearedauth_tokens bool + done bool + oldValue func(context.Context) (*User, error) + predicates []predicate.User +} + +var _ ent.Mutation = (*UserMutation)(nil) + +// userOption allows management of the mutation configuration using functional options. +type userOption func(*UserMutation) + +// newUserMutation creates new mutation for the User entity. 
+func newUserMutation(c config, op Op, opts ...userOption) *UserMutation { + m := &UserMutation{ + config: c, + op: op, + typ: TypeUser, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withUserID sets the ID field of the mutation. +func withUserID(id uuid.UUID) userOption { + return func(m *UserMutation) { + var ( + err error + once sync.Once + value *User + ) + m.oldValue = func(ctx context.Context) (*User, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().User.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withUser sets the old User of the mutation. +func withUser(node *User) userOption { + return func(m *UserMutation) { + m.oldValue = func(context.Context) (*User, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m UserMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m UserMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of User entities. +func (m *UserMutation) SetID(id uuid.UUID) { + m.id = &id +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. 
+func (m *UserMutation) ID() (id uuid.UUID, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. +func (m *UserMutation) IDs(ctx context.Context) ([]uuid.UUID, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []uuid.UUID{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().User.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetName sets the "name" field. +func (m *UserMutation) SetName(s string) { + m.name = &s +} + +// Name returns the value of the "name" field in the mutation. +func (m *UserMutation) Name() (r string, exists bool) { + v := m.name + if v == nil { + return + } + return *v, true +} + +// OldName returns the old "name" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldName(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldName is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldName requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldName: %w", err) + } + return oldValue.Name, nil +} + +// ResetName resets all changes to the "name" field. +func (m *UserMutation) ResetName() { + m.name = nil +} + +// SetEmail sets the "email" field. 
+func (m *UserMutation) SetEmail(s string) { + m.email = &s +} + +// Email returns the value of the "email" field in the mutation. +func (m *UserMutation) Email() (r string, exists bool) { + v := m.email + if v == nil { + return + } + return *v, true +} + +// OldEmail returns the old "email" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldEmail(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldEmail is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldEmail requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldEmail: %w", err) + } + return oldValue.Email, nil +} + +// ResetEmail resets all changes to the "email" field. +func (m *UserMutation) ResetEmail() { + m.email = nil +} + +// SetPassword sets the "password" field. +func (m *UserMutation) SetPassword(s string) { + m.password = &s +} + +// Password returns the value of the "password" field in the mutation. +func (m *UserMutation) Password() (r string, exists bool) { + v := m.password + if v == nil { + return + } + return *v, true +} + +// OldPassword returns the old "password" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *UserMutation) OldPassword(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPassword is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPassword requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPassword: %w", err) + } + return oldValue.Password, nil +} + +// ResetPassword resets all changes to the "password" field. +func (m *UserMutation) ResetPassword() { + m.password = nil +} + +// SetIsSuperuser sets the "is_superuser" field. +func (m *UserMutation) SetIsSuperuser(b bool) { + m.is_superuser = &b +} + +// IsSuperuser returns the value of the "is_superuser" field in the mutation. +func (m *UserMutation) IsSuperuser() (r bool, exists bool) { + v := m.is_superuser + if v == nil { + return + } + return *v, true +} + +// OldIsSuperuser returns the old "is_superuser" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldIsSuperuser(ctx context.Context) (v bool, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldIsSuperuser is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldIsSuperuser requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldIsSuperuser: %w", err) + } + return oldValue.IsSuperuser, nil +} + +// ResetIsSuperuser resets all changes to the "is_superuser" field. +func (m *UserMutation) ResetIsSuperuser() { + m.is_superuser = nil +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by ids. 
+func (m *UserMutation) AddAuthTokenIDs(ids ...int) { + if m.auth_tokens == nil { + m.auth_tokens = make(map[int]struct{}) + } + for i := range ids { + m.auth_tokens[ids[i]] = struct{}{} + } +} + +// ClearAuthTokens clears the "auth_tokens" edge to the AuthTokens entity. +func (m *UserMutation) ClearAuthTokens() { + m.clearedauth_tokens = true +} + +// AuthTokensCleared reports if the "auth_tokens" edge to the AuthTokens entity was cleared. +func (m *UserMutation) AuthTokensCleared() bool { + return m.clearedauth_tokens +} + +// RemoveAuthTokenIDs removes the "auth_tokens" edge to the AuthTokens entity by IDs. +func (m *UserMutation) RemoveAuthTokenIDs(ids ...int) { + if m.removedauth_tokens == nil { + m.removedauth_tokens = make(map[int]struct{}) + } + for i := range ids { + delete(m.auth_tokens, ids[i]) + m.removedauth_tokens[ids[i]] = struct{}{} + } +} + +// RemovedAuthTokens returns the removed IDs of the "auth_tokens" edge to the AuthTokens entity. +func (m *UserMutation) RemovedAuthTokensIDs() (ids []int) { + for id := range m.removedauth_tokens { + ids = append(ids, id) + } + return +} + +// AuthTokensIDs returns the "auth_tokens" edge IDs in the mutation. +func (m *UserMutation) AuthTokensIDs() (ids []int) { + for id := range m.auth_tokens { + ids = append(ids, id) + } + return +} + +// ResetAuthTokens resets all changes to the "auth_tokens" edge. +func (m *UserMutation) ResetAuthTokens() { + m.auth_tokens = nil + m.clearedauth_tokens = false + m.removedauth_tokens = nil +} + +// Where appends a list predicates to the UserMutation builder. +func (m *UserMutation) Where(ps ...predicate.User) { + m.predicates = append(m.predicates, ps...) +} + +// Op returns the operation name. +func (m *UserMutation) Op() Op { + return m.op +} + +// Type returns the node type of this mutation (User). +func (m *UserMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. 
Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). +func (m *UserMutation) Fields() []string { + fields := make([]string, 0, 4) + if m.name != nil { + fields = append(fields, user.FieldName) + } + if m.email != nil { + fields = append(fields, user.FieldEmail) + } + if m.password != nil { + fields = append(fields, user.FieldPassword) + } + if m.is_superuser != nil { + fields = append(fields, user.FieldIsSuperuser) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *UserMutation) Field(name string) (ent.Value, bool) { + switch name { + case user.FieldName: + return m.Name() + case user.FieldEmail: + return m.Email() + case user.FieldPassword: + return m.Password() + case user.FieldIsSuperuser: + return m.IsSuperuser() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *UserMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case user.FieldName: + return m.OldName(ctx) + case user.FieldEmail: + return m.OldEmail(ctx) + case user.FieldPassword: + return m.OldPassword(ctx) + case user.FieldIsSuperuser: + return m.OldIsSuperuser(ctx) + } + return nil, fmt.Errorf("unknown User field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *UserMutation) SetField(name string, value ent.Value) error { + switch name { + case user.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case user.FieldEmail: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetEmail(v) + return nil + case user.FieldPassword: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPassword(v) + return nil + case user.FieldIsSuperuser: + v, ok := value.(bool) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetIsSuperuser(v) + return nil + } + return fmt.Errorf("unknown User field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *UserMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *UserMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *UserMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown User numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *UserMutation) ClearedFields() []string { + return nil +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. 
+func (m *UserMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *UserMutation) ClearField(name string) error { + return fmt.Errorf("unknown User nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *UserMutation) ResetField(name string) error { + switch name { + case user.FieldName: + m.ResetName() + return nil + case user.FieldEmail: + m.ResetEmail() + return nil + case user.FieldPassword: + m.ResetPassword() + return nil + case user.FieldIsSuperuser: + m.ResetIsSuperuser() + return nil + } + return fmt.Errorf("unknown User field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *UserMutation) AddedEdges() []string { + edges := make([]string, 0, 1) + if m.auth_tokens != nil { + edges = append(edges, user.EdgeAuthTokens) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *UserMutation) AddedIDs(name string) []ent.Value { + switch name { + case user.EdgeAuthTokens: + ids := make([]ent.Value, 0, len(m.auth_tokens)) + for id := range m.auth_tokens { + ids = append(ids, id) + } + return ids + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *UserMutation) RemovedEdges() []string { + edges := make([]string, 0, 1) + if m.removedauth_tokens != nil { + edges = append(edges, user.EdgeAuthTokens) + } + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. 
+func (m *UserMutation) RemovedIDs(name string) []ent.Value { + switch name { + case user.EdgeAuthTokens: + ids := make([]ent.Value, 0, len(m.removedauth_tokens)) + for id := range m.removedauth_tokens { + ids = append(ids, id) + } + return ids + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *UserMutation) ClearedEdges() []string { + edges := make([]string, 0, 1) + if m.clearedauth_tokens { + edges = append(edges, user.EdgeAuthTokens) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *UserMutation) EdgeCleared(name string) bool { + switch name { + case user.EdgeAuthTokens: + return m.clearedauth_tokens + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *UserMutation) ClearEdge(name string) error { + switch name { + } + return fmt.Errorf("unknown User unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *UserMutation) ResetEdge(name string) error { + switch name { + case user.EdgeAuthTokens: + m.ResetAuthTokens() + return nil + } + return fmt.Errorf("unknown User edge %s", name) +} diff --git a/backend/ent/predicate/predicate.go b/backend/ent/predicate/predicate.go new file mode 100644 index 0000000..b26324f --- /dev/null +++ b/backend/ent/predicate/predicate.go @@ -0,0 +1,13 @@ +// Code generated by entc, DO NOT EDIT. + +package predicate + +import ( + "entgo.io/ent/dialect/sql" +) + +// AuthTokens is the predicate function for authtokens builders. +type AuthTokens func(*sql.Selector) + +// User is the predicate function for user builders. 
+type User func(*sql.Selector) diff --git a/backend/ent/runtime.go b/backend/ent/runtime.go new file mode 100644 index 0000000..828477f --- /dev/null +++ b/backend/ent/runtime.go @@ -0,0 +1,50 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/schema" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// The init function reads all schema descriptors with runtime code +// (default values, validators, hooks and policies) and stitches it +// to their package variables. +func init() { + authtokensFields := schema.AuthTokens{}.Fields() + _ = authtokensFields + // authtokensDescExpiresAt is the schema descriptor for expires_at field. + authtokensDescExpiresAt := authtokensFields[1].Descriptor() + // authtokens.DefaultExpiresAt holds the default value on creation for the expires_at field. + authtokens.DefaultExpiresAt = authtokensDescExpiresAt.Default.(func() time.Time) + // authtokensDescCreatedAt is the schema descriptor for created_at field. + authtokensDescCreatedAt := authtokensFields[2].Descriptor() + // authtokens.DefaultCreatedAt holds the default value on creation for the created_at field. + authtokens.DefaultCreatedAt = authtokensDescCreatedAt.Default.(func() time.Time) + userFields := schema.User{}.Fields() + _ = userFields + // userDescName is the schema descriptor for name field. + userDescName := userFields[1].Descriptor() + // user.NameValidator is a validator for the "name" field. It is called by the builders before save. + user.NameValidator = userDescName.Validators[0].(func(string) error) + // userDescEmail is the schema descriptor for email field. + userDescEmail := userFields[2].Descriptor() + // user.EmailValidator is a validator for the "email" field. It is called by the builders before save. 
+ user.EmailValidator = userDescEmail.Validators[0].(func(string) error) + // userDescPassword is the schema descriptor for password field. + userDescPassword := userFields[3].Descriptor() + // user.PasswordValidator is a validator for the "password" field. It is called by the builders before save. + user.PasswordValidator = userDescPassword.Validators[0].(func(string) error) + // userDescIsSuperuser is the schema descriptor for is_superuser field. + userDescIsSuperuser := userFields[4].Descriptor() + // user.DefaultIsSuperuser holds the default value on creation for the is_superuser field. + user.DefaultIsSuperuser = userDescIsSuperuser.Default.(bool) + // userDescID is the schema descriptor for id field. + userDescID := userFields[0].Descriptor() + // user.DefaultID holds the default value on creation for the id field. + user.DefaultID = userDescID.Default.(func() uuid.UUID) +} diff --git a/backend/ent/runtime/runtime.go b/backend/ent/runtime/runtime.go new file mode 100644 index 0000000..31da890 --- /dev/null +++ b/backend/ent/runtime/runtime.go @@ -0,0 +1,10 @@ +// Code generated by entc, DO NOT EDIT. + +package runtime + +// The schema-stitching logic is generated in github.com/hay-kot/git-web-template/backend/ent/runtime.go + +const ( + Version = "v0.10.0" // Version of ent codegen. + Sum = "h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo=" // Sum of ent codegen. +) diff --git a/backend/ent/schema/authtokens.go b/backend/ent/schema/authtokens.go new file mode 100644 index 0000000..fbe9fd1 --- /dev/null +++ b/backend/ent/schema/authtokens.go @@ -0,0 +1,43 @@ +package schema + +import ( + "time" + + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "entgo.io/ent/schema/index" +) + +// AuthTokens holds the schema definition for the AuthTokens entity. +type AuthTokens struct { + ent.Schema +} + +// Fields of the AuthTokens. +func (AuthTokens) Fields() []ent.Field { + return []ent.Field{ + field.Bytes("token"). 
+ Unique(), + field.Time("expires_at"). + Default(func() time.Time { return time.Now().Add(time.Hour * 24 * 7) }), + field.Time("created_at"). + Default(time.Now), + } +} + +// Edges of the AuthTokens. +func (AuthTokens) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("user", User.Type). + Ref("auth_tokens"). + Unique(), + } +} + +func (AuthTokens) Indexes() []ent.Index { + return []ent.Index{ + // non-unique index. + index.Fields("token"), + } +} diff --git a/backend/ent/schema/user.go b/backend/ent/schema/user.go new file mode 100644 index 0000000..d1fb726 --- /dev/null +++ b/backend/ent/schema/user.go @@ -0,0 +1,38 @@ +package schema + +import ( + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "github.com/google/uuid" +) + +// User holds the schema definition for the User entity. +type User struct { + ent.Schema +} + +// Fields of the User. +func (User) Fields() []ent.Field { + return []ent.Field{ + field.UUID("id", uuid.UUID{}). + Default(uuid.New), + field.String("name"). + NotEmpty(), + field.String("email"). + NotEmpty(). + Unique(), + field.String("password"). + NotEmpty(). + Sensitive(), + field.Bool("is_superuser"). + Default(false), + } +} + +// Edges of the User. +func (User) Edges() []ent.Edge { + return []ent.Edge{ + edge.To("auth_tokens", AuthTokens.Type), + } +} diff --git a/backend/ent/tx.go b/backend/ent/tx.go new file mode 100644 index 0000000..b93d068 --- /dev/null +++ b/backend/ent/tx.go @@ -0,0 +1,213 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "sync" + + "entgo.io/ent/dialect" +) + +// Tx is a transactional client that is created by calling Client.Tx(). +type Tx struct { + config + // AuthTokens is the client for interacting with the AuthTokens builders. + AuthTokens *AuthTokensClient + // User is the client for interacting with the User builders. + User *UserClient + + // lazily loaded. + client *Client + clientOnce sync.Once + + // completion callbacks. 
+ mu sync.Mutex + onCommit []CommitHook + onRollback []RollbackHook + + // ctx lives for the life of the transaction. It is + // the same context used by the underlying connection. + ctx context.Context +} + +type ( + // Committer is the interface that wraps the Commit method. + Committer interface { + Commit(context.Context, *Tx) error + } + + // The CommitFunc type is an adapter to allow the use of ordinary + // function as a Committer. If f is a function with the appropriate + // signature, CommitFunc(f) is a Committer that calls f. + CommitFunc func(context.Context, *Tx) error + + // CommitHook defines the "commit middleware". A function that gets a Committer + // and returns a Committer. For example: + // + // hook := func(next ent.Committer) ent.Committer { + // return ent.CommitFunc(func(ctx context.Context, tx *ent.Tx) error { + // // Do some stuff before. + // if err := next.Commit(ctx, tx); err != nil { + // return err + // } + // // Do some stuff after. + // return nil + // }) + // } + // + CommitHook func(Committer) Committer +) + +// Commit calls f(ctx, m). +func (f CommitFunc) Commit(ctx context.Context, tx *Tx) error { + return f(ctx, tx) +} + +// Commit commits the transaction. +func (tx *Tx) Commit() error { + txDriver := tx.config.driver.(*txDriver) + var fn Committer = CommitFunc(func(context.Context, *Tx) error { + return txDriver.tx.Commit() + }) + tx.mu.Lock() + hooks := append([]CommitHook(nil), tx.onCommit...) + tx.mu.Unlock() + for i := len(hooks) - 1; i >= 0; i-- { + fn = hooks[i](fn) + } + return fn.Commit(tx.ctx, tx) +} + +// OnCommit adds a hook to call on commit. +func (tx *Tx) OnCommit(f CommitHook) { + tx.mu.Lock() + defer tx.mu.Unlock() + tx.onCommit = append(tx.onCommit, f) +} + +type ( + // Rollbacker is the interface that wraps the Rollback method. + Rollbacker interface { + Rollback(context.Context, *Tx) error + } + + // The RollbackFunc type is an adapter to allow the use of ordinary + // function as a Rollbacker. 
If f is a function with the appropriate + // signature, RollbackFunc(f) is a Rollbacker that calls f. + RollbackFunc func(context.Context, *Tx) error + + // RollbackHook defines the "rollback middleware". A function that gets a Rollbacker + // and returns a Rollbacker. For example: + // + // hook := func(next ent.Rollbacker) ent.Rollbacker { + // return ent.RollbackFunc(func(ctx context.Context, tx *ent.Tx) error { + // // Do some stuff before. + // if err := next.Rollback(ctx, tx); err != nil { + // return err + // } + // // Do some stuff after. + // return nil + // }) + // } + // + RollbackHook func(Rollbacker) Rollbacker +) + +// Rollback calls f(ctx, m). +func (f RollbackFunc) Rollback(ctx context.Context, tx *Tx) error { + return f(ctx, tx) +} + +// Rollback rollbacks the transaction. +func (tx *Tx) Rollback() error { + txDriver := tx.config.driver.(*txDriver) + var fn Rollbacker = RollbackFunc(func(context.Context, *Tx) error { + return txDriver.tx.Rollback() + }) + tx.mu.Lock() + hooks := append([]RollbackHook(nil), tx.onRollback...) + tx.mu.Unlock() + for i := len(hooks) - 1; i >= 0; i-- { + fn = hooks[i](fn) + } + return fn.Rollback(tx.ctx, tx) +} + +// OnRollback adds a hook to call on rollback. +func (tx *Tx) OnRollback(f RollbackHook) { + tx.mu.Lock() + defer tx.mu.Unlock() + tx.onRollback = append(tx.onRollback, f) +} + +// Client returns a Client that binds to current transaction. +func (tx *Tx) Client() *Client { + tx.clientOnce.Do(func() { + tx.client = &Client{config: tx.config} + tx.client.init() + }) + return tx.client +} + +func (tx *Tx) init() { + tx.AuthTokens = NewAuthTokensClient(tx.config) + tx.User = NewUserClient(tx.config) +} + +// txDriver wraps the given dialect.Tx with a nop dialect.Driver implementation. +// The idea is to support transactions without adding any extra code to the builders. +// When a builder calls to driver.Tx(), it gets the same dialect.Tx instance. 
+// Commit and Rollback are nop for the internal builders and the user must call one +// of them in order to commit or rollback the transaction. +// +// If a closed transaction is embedded in one of the generated entities, and the entity +// applies a query, for example: AuthTokens.QueryXXX(), the query will be executed +// through the driver which created this transaction. +// +// Note that txDriver is not goroutine safe. +type txDriver struct { + // the driver we started the transaction from. + drv dialect.Driver + // tx is the underlying transaction. + tx dialect.Tx +} + +// newTx creates a new transactional driver. +func newTx(ctx context.Context, drv dialect.Driver) (*txDriver, error) { + tx, err := drv.Tx(ctx) + if err != nil { + return nil, err + } + return &txDriver{tx: tx, drv: drv}, nil +} + +// Tx returns the transaction wrapper (txDriver) to avoid Commit or Rollback calls +// from the internal builders. Should be called only by the internal builders. +func (tx *txDriver) Tx(context.Context) (dialect.Tx, error) { return tx, nil } + +// Dialect returns the dialect of the driver we started the transaction from. +func (tx *txDriver) Dialect() string { return tx.drv.Dialect() } + +// Close is a nop close. +func (*txDriver) Close() error { return nil } + +// Commit is a nop commit for the internal builders. +// User must call `Tx.Commit` in order to commit the transaction. +func (*txDriver) Commit() error { return nil } + +// Rollback is a nop rollback for the internal builders. +// User must call `Tx.Rollback` in order to rollback the transaction. +func (*txDriver) Rollback() error { return nil } + +// Exec calls tx.Exec. +func (tx *txDriver) Exec(ctx context.Context, query string, args, v interface{}) error { + return tx.tx.Exec(ctx, query, args, v) +} + +// Query calls tx.Query. 
+func (tx *txDriver) Query(ctx context.Context, query string, args, v interface{}) error {
+	return tx.tx.Query(ctx, query, args, v)
+}
+
+// Compile-time check that txDriver satisfies dialect.Driver.
+var _ dialect.Driver = (*txDriver)(nil)
diff --git a/backend/ent/user.go b/backend/ent/user.go
new file mode 100644
index 0000000..62eaf8f
--- /dev/null
+++ b/backend/ent/user.go
@@ -0,0 +1,157 @@
+// Code generated by entc, DO NOT EDIT.
+
+package ent
+
+import (
+	"fmt"
+	"strings"
+
+	"entgo.io/ent/dialect/sql"
+	"github.com/google/uuid"
+	"github.com/hay-kot/git-web-template/backend/ent/user"
+)
+
+// User is the model entity for the User schema.
+type User struct {
+	config `json:"-"`
+	// ID of the ent.
+	ID uuid.UUID `json:"id,omitempty"`
+	// Name holds the value of the "name" field.
+	Name string `json:"name,omitempty"`
+	// Email holds the value of the "email" field.
+	Email string `json:"email,omitempty"`
+	// Password holds the value of the "password" field.
+	// Tagged json:"-" so the stored credential never appears in JSON output
+	// (the field is declared Sensitive() in ent/schema/user.go).
+	Password string `json:"-"`
+	// IsSuperuser holds the value of the "is_superuser" field.
+	IsSuperuser bool `json:"is_superuser,omitempty"`
+	// Edges holds the relations/edges for other nodes in the graph.
+	// The values are being populated by the UserQuery when eager-loading is set.
+	Edges UserEdges `json:"edges"`
+}
+
+// UserEdges holds the relations/edges for other nodes in the graph.
+type UserEdges struct {
+	// AuthTokens holds the value of the auth_tokens edge.
+	AuthTokens []*AuthTokens `json:"auth_tokens,omitempty"`
+	// loadedTypes holds the information for reporting if a
+	// type was loaded (or requested) in eager-loading or not.
+	loadedTypes [1]bool
+}
+
+// AuthTokensOrErr returns the AuthTokens value or an error if the edge
+// was not loaded in eager-loading.
+// An empty-but-loaded edge returns (nil, nil); NotLoadedError is returned
+// only when eager-loading was never requested for this edge.
+func (e UserEdges) AuthTokensOrErr() ([]*AuthTokens, error) {
+	if e.loadedTypes[0] {
+		return e.AuthTokens, nil
+	}
+	return nil, &NotLoadedError{edge: "auth_tokens"}
+}
+
+// scanValues returns the types for scanning values from sql.Rows.
+func (*User) scanValues(columns []string) ([]interface{}, error) {
+	values := make([]interface{}, len(columns))
+	for i := range columns {
+		switch columns[i] {
+		case user.FieldIsSuperuser:
+			values[i] = new(sql.NullBool)
+		case user.FieldName, user.FieldEmail, user.FieldPassword:
+			values[i] = new(sql.NullString)
+		case user.FieldID:
+			values[i] = new(uuid.UUID)
+		default:
+			// Any column not declared on the User schema aborts the scan.
+			return nil, fmt.Errorf("unexpected column %q for type User", columns[i])
+		}
+	}
+	return values, nil
+}
+
+// assignValues assigns the values that were returned from sql.Rows (after scanning)
+// to the User fields.
+func (u *User) assignValues(columns []string, values []interface{}) error {
+	// Only too few values is an error; extra trailing values are ignored
+	// (the loop below only ranges over columns).
+	if m, n := len(values), len(columns); m < n {
+		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
+	}
+	for i := range columns {
+		switch columns[i] {
+		case user.FieldID:
+			// Scanned directly into *uuid.UUID (see scanValues); a nil pointer
+			// leaves u.ID as the zero UUID.
+			if value, ok := values[i].(*uuid.UUID); !ok {
+				return fmt.Errorf("unexpected type %T for field id", values[i])
+			} else if value != nil {
+				u.ID = *value
+			}
+		case user.FieldName:
+			if value, ok := values[i].(*sql.NullString); !ok {
+				return fmt.Errorf("unexpected type %T for field name", values[i])
+			} else if value.Valid {
+				u.Name = value.String
+			}
+		case user.FieldEmail:
+			if value, ok := values[i].(*sql.NullString); !ok {
+				return fmt.Errorf("unexpected type %T for field email", values[i])
+			} else if value.Valid {
+				u.Email = value.String
+			}
+		case user.FieldPassword:
+			if value, ok := values[i].(*sql.NullString); !ok {
+				return fmt.Errorf("unexpected type %T for field password", values[i])
+			} else if value.Valid {
+				u.Password = value.String
+			}
+		case user.FieldIsSuperuser:
+			if value, ok := values[i].(*sql.NullBool); !ok {
+				return fmt.Errorf("unexpected type %T for field is_superuser", values[i])
+			} else if value.Valid {
+				u.IsSuperuser = value.Bool
+			}
+		}
+	}
+	return nil
+}
+
+// QueryAuthTokens queries the "auth_tokens" edge of the User entity.
+func (u *User) QueryAuthTokens() *AuthTokensQuery { + return (&UserClient{config: u.config}).QueryAuthTokens(u) +} + +// Update returns a builder for updating this User. +// Note that you need to call User.Unwrap() before calling this method if this User +// was returned from a transaction, and the transaction was committed or rolled back. +func (u *User) Update() *UserUpdateOne { + return (&UserClient{config: u.config}).UpdateOne(u) +} + +// Unwrap unwraps the User entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (u *User) Unwrap() *User { + tx, ok := u.config.driver.(*txDriver) + if !ok { + panic("ent: User is not a transactional entity") + } + u.config.driver = tx.drv + return u +} + +// String implements the fmt.Stringer. +func (u *User) String() string { + var builder strings.Builder + builder.WriteString("User(") + builder.WriteString(fmt.Sprintf("id=%v", u.ID)) + builder.WriteString(", name=") + builder.WriteString(u.Name) + builder.WriteString(", email=") + builder.WriteString(u.Email) + builder.WriteString(", password=") + builder.WriteString(", is_superuser=") + builder.WriteString(fmt.Sprintf("%v", u.IsSuperuser)) + builder.WriteByte(')') + return builder.String() +} + +// Users is a parsable slice of User. +type Users []*User + +func (u Users) config(cfg config) { + for _i := range u { + u[_i].config = cfg + } +} diff --git a/backend/ent/user/user.go b/backend/ent/user/user.go new file mode 100644 index 0000000..9bbbd9a --- /dev/null +++ b/backend/ent/user/user.go @@ -0,0 +1,65 @@ +// Code generated by entc, DO NOT EDIT. + +package user + +import ( + "github.com/google/uuid" +) + +const ( + // Label holds the string label denoting the user type in the database. + Label = "user" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldName holds the string denoting the name field in the database. 
+ FieldName = "name" + // FieldEmail holds the string denoting the email field in the database. + FieldEmail = "email" + // FieldPassword holds the string denoting the password field in the database. + FieldPassword = "password" + // FieldIsSuperuser holds the string denoting the is_superuser field in the database. + FieldIsSuperuser = "is_superuser" + // EdgeAuthTokens holds the string denoting the auth_tokens edge name in mutations. + EdgeAuthTokens = "auth_tokens" + // Table holds the table name of the user in the database. + Table = "users" + // AuthTokensTable is the table that holds the auth_tokens relation/edge. + AuthTokensTable = "auth_tokens" + // AuthTokensInverseTable is the table name for the AuthTokens entity. + // It exists in this package in order to avoid circular dependency with the "authtokens" package. + AuthTokensInverseTable = "auth_tokens" + // AuthTokensColumn is the table column denoting the auth_tokens relation/edge. + AuthTokensColumn = "user_auth_tokens" +) + +// Columns holds all SQL columns for user fields. +var Columns = []string{ + FieldID, + FieldName, + FieldEmail, + FieldPassword, + FieldIsSuperuser, +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + return false +} + +var ( + // NameValidator is a validator for the "name" field. It is called by the builders before save. + NameValidator func(string) error + // EmailValidator is a validator for the "email" field. It is called by the builders before save. + EmailValidator func(string) error + // PasswordValidator is a validator for the "password" field. It is called by the builders before save. + PasswordValidator func(string) error + // DefaultIsSuperuser holds the default value on creation for the "is_superuser" field. + DefaultIsSuperuser bool + // DefaultID holds the default value on creation for the "id" field. 
+ DefaultID func() uuid.UUID +) diff --git a/backend/ent/user/where.go b/backend/ent/user/where.go new file mode 100644 index 0000000..36db52a --- /dev/null +++ b/backend/ent/user/where.go @@ -0,0 +1,528 @@ +// Code generated by entc, DO NOT EDIT. + +package user + +import ( + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. 
+func IDGT(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// Name applies equality check predicate on the "name" field. It's identical to NameEQ. +func Name(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// Email applies equality check predicate on the "email" field. It's identical to EmailEQ. +func Email(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldEmail), v)) + }) +} + +// Password applies equality check predicate on the "password" field. It's identical to PasswordEQ. +func Password(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPassword), v)) + }) +} + +// IsSuperuser applies equality check predicate on the "is_superuser" field. It's identical to IsSuperuserEQ. +func IsSuperuser(v bool) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldIsSuperuser), v)) + }) +} + +// NameEQ applies the EQ predicate on the "name" field. +func NameEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// NameNEQ applies the NEQ predicate on the "name" field. 
+func NameNEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldName), v)) + }) +} + +// NameIn applies the In predicate on the "name" field. +func NameIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldName), v...)) + }) +} + +// NameNotIn applies the NotIn predicate on the "name" field. +func NameNotIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldName), v...)) + }) +} + +// NameGT applies the GT predicate on the "name" field. +func NameGT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldName), v)) + }) +} + +// NameGTE applies the GTE predicate on the "name" field. +func NameGTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldName), v)) + }) +} + +// NameLT applies the LT predicate on the "name" field. +func NameLT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldName), v)) + }) +} + +// NameLTE applies the LTE predicate on the "name" field. +func NameLTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldName), v)) + }) +} + +// NameContains applies the Contains predicate on the "name" field. 
+func NameContains(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldName), v)) + }) +} + +// NameHasPrefix applies the HasPrefix predicate on the "name" field. +func NameHasPrefix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldName), v)) + }) +} + +// NameHasSuffix applies the HasSuffix predicate on the "name" field. +func NameHasSuffix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldName), v)) + }) +} + +// NameEqualFold applies the EqualFold predicate on the "name" field. +func NameEqualFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldName), v)) + }) +} + +// NameContainsFold applies the ContainsFold predicate on the "name" field. +func NameContainsFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldName), v)) + }) +} + +// EmailEQ applies the EQ predicate on the "email" field. +func EmailEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldEmail), v)) + }) +} + +// EmailNEQ applies the NEQ predicate on the "email" field. +func EmailNEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldEmail), v)) + }) +} + +// EmailIn applies the In predicate on the "email" field. +func EmailIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldEmail), v...)) + }) +} + +// EmailNotIn applies the NotIn predicate on the "email" field. 
+func EmailNotIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldEmail), v...)) + }) +} + +// EmailGT applies the GT predicate on the "email" field. +func EmailGT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldEmail), v)) + }) +} + +// EmailGTE applies the GTE predicate on the "email" field. +func EmailGTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldEmail), v)) + }) +} + +// EmailLT applies the LT predicate on the "email" field. +func EmailLT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldEmail), v)) + }) +} + +// EmailLTE applies the LTE predicate on the "email" field. +func EmailLTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldEmail), v)) + }) +} + +// EmailContains applies the Contains predicate on the "email" field. +func EmailContains(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldEmail), v)) + }) +} + +// EmailHasPrefix applies the HasPrefix predicate on the "email" field. +func EmailHasPrefix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldEmail), v)) + }) +} + +// EmailHasSuffix applies the HasSuffix predicate on the "email" field. +func EmailHasSuffix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldEmail), v)) + }) +} + +// EmailEqualFold applies the EqualFold predicate on the "email" field. 
+func EmailEqualFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldEmail), v)) + }) +} + +// EmailContainsFold applies the ContainsFold predicate on the "email" field. +func EmailContainsFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldEmail), v)) + }) +} + +// PasswordEQ applies the EQ predicate on the "password" field. +func PasswordEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPassword), v)) + }) +} + +// PasswordNEQ applies the NEQ predicate on the "password" field. +func PasswordNEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldPassword), v)) + }) +} + +// PasswordIn applies the In predicate on the "password" field. +func PasswordIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldPassword), v...)) + }) +} + +// PasswordNotIn applies the NotIn predicate on the "password" field. +func PasswordNotIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldPassword), v...)) + }) +} + +// PasswordGT applies the GT predicate on the "password" field. 
+func PasswordGT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldPassword), v)) + }) +} + +// PasswordGTE applies the GTE predicate on the "password" field. +func PasswordGTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldPassword), v)) + }) +} + +// PasswordLT applies the LT predicate on the "password" field. +func PasswordLT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldPassword), v)) + }) +} + +// PasswordLTE applies the LTE predicate on the "password" field. +func PasswordLTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldPassword), v)) + }) +} + +// PasswordContains applies the Contains predicate on the "password" field. +func PasswordContains(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldPassword), v)) + }) +} + +// PasswordHasPrefix applies the HasPrefix predicate on the "password" field. +func PasswordHasPrefix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldPassword), v)) + }) +} + +// PasswordHasSuffix applies the HasSuffix predicate on the "password" field. +func PasswordHasSuffix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldPassword), v)) + }) +} + +// PasswordEqualFold applies the EqualFold predicate on the "password" field. +func PasswordEqualFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldPassword), v)) + }) +} + +// PasswordContainsFold applies the ContainsFold predicate on the "password" field. 
+func PasswordContainsFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldPassword), v)) + }) +} + +// IsSuperuserEQ applies the EQ predicate on the "is_superuser" field. +func IsSuperuserEQ(v bool) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldIsSuperuser), v)) + }) +} + +// IsSuperuserNEQ applies the NEQ predicate on the "is_superuser" field. +func IsSuperuserNEQ(v bool) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldIsSuperuser), v)) + }) +} + +// HasAuthTokens applies the HasEdge predicate on the "auth_tokens" edge. +func HasAuthTokens() predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(AuthTokensTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, AuthTokensTable, AuthTokensColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasAuthTokensWith applies the HasEdge predicate on the "auth_tokens" edge with a given conditions (other predicates). +func HasAuthTokensWith(preds ...predicate.AuthTokens) predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(AuthTokensInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, AuthTokensTable, AuthTokensColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.User) predicate.User { + return predicate.User(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. 
+func Or(predicates ...predicate.User) predicate.User {
+	return predicate.User(func(s *sql.Selector) {
+		// Build the OR chain on a detached clone, then attach it as a single
+		// parenthesized predicate on the outer selector.
+		s1 := s.Clone().SetP(nil)
+		for i, p := range predicates {
+			if i > 0 {
+				s1.Or()
+			}
+			p(s1)
+		}
+		s.Where(s1.P())
+	})
+}
+
+// Not applies the not operator on the given predicate.
+func Not(p predicate.User) predicate.User {
+	return predicate.User(func(s *sql.Selector) {
+		p(s.Not())
+	})
+}
diff --git a/backend/ent/user_create.go b/backend/ent/user_create.go
new file mode 100644
index 0000000..95ad932
--- /dev/null
+++ b/backend/ent/user_create.go
@@ -0,0 +1,363 @@
+// Code generated by entc, DO NOT EDIT.
+
+package ent
+
+import (
+	"context"
+	"errors"
+	"fmt"
+
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"entgo.io/ent/schema/field"
+	"github.com/google/uuid"
+	"github.com/hay-kot/git-web-template/backend/ent/authtokens"
+	"github.com/hay-kot/git-web-template/backend/ent/user"
+)
+
+// UserCreate is the builder for creating a User entity.
+type UserCreate struct {
+	config
+	mutation *UserMutation
+	hooks    []Hook
+}
+
+// SetName sets the "name" field.
+func (uc *UserCreate) SetName(s string) *UserCreate {
+	uc.mutation.SetName(s)
+	return uc
+}
+
+// SetEmail sets the "email" field.
+func (uc *UserCreate) SetEmail(s string) *UserCreate {
+	uc.mutation.SetEmail(s)
+	return uc
+}
+
+// SetPassword sets the "password" field.
+func (uc *UserCreate) SetPassword(s string) *UserCreate {
+	uc.mutation.SetPassword(s)
+	return uc
+}
+
+// SetIsSuperuser sets the "is_superuser" field.
+func (uc *UserCreate) SetIsSuperuser(b bool) *UserCreate {
+	uc.mutation.SetIsSuperuser(b)
+	return uc
+}
+
+// SetNillableIsSuperuser sets the "is_superuser" field if the given value is not nil.
+func (uc *UserCreate) SetNillableIsSuperuser(b *bool) *UserCreate {
+	if b != nil {
+		uc.SetIsSuperuser(*b)
+	}
+	return uc
+}
+
+// SetID sets the "id" field.
+func (uc *UserCreate) SetID(u uuid.UUID) *UserCreate { + uc.mutation.SetID(u) + return uc +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (uc *UserCreate) SetNillableID(u *uuid.UUID) *UserCreate { + if u != nil { + uc.SetID(*u) + } + return uc +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. +func (uc *UserCreate) AddAuthTokenIDs(ids ...int) *UserCreate { + uc.mutation.AddAuthTokenIDs(ids...) + return uc +} + +// AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. +func (uc *UserCreate) AddAuthTokens(a ...*AuthTokens) *UserCreate { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uc.AddAuthTokenIDs(ids...) +} + +// Mutation returns the UserMutation object of the builder. +func (uc *UserCreate) Mutation() *UserMutation { + return uc.mutation +} + +// Save creates the User in the database. +func (uc *UserCreate) Save(ctx context.Context) (*User, error) { + var ( + err error + node *User + ) + uc.defaults() + if len(uc.hooks) == 0 { + if err = uc.check(); err != nil { + return nil, err + } + node, err = uc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = uc.check(); err != nil { + return nil, err + } + uc.mutation = mutation + if node, err = uc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(uc.hooks) - 1; i >= 0; i-- { + if uc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = uc.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, uc.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. 
+func (uc *UserCreate) SaveX(ctx context.Context) *User { + v, err := uc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (uc *UserCreate) Exec(ctx context.Context) error { + _, err := uc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (uc *UserCreate) ExecX(ctx context.Context) { + if err := uc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (uc *UserCreate) defaults() { + if _, ok := uc.mutation.IsSuperuser(); !ok { + v := user.DefaultIsSuperuser + uc.mutation.SetIsSuperuser(v) + } + if _, ok := uc.mutation.ID(); !ok { + v := user.DefaultID() + uc.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (uc *UserCreate) check() error { + if _, ok := uc.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "User.name"`)} + } + if v, ok := uc.mutation.Name(); ok { + if err := user.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "User.name": %w`, err)} + } + } + if _, ok := uc.mutation.Email(); !ok { + return &ValidationError{Name: "email", err: errors.New(`ent: missing required field "User.email"`)} + } + if v, ok := uc.mutation.Email(); ok { + if err := user.EmailValidator(v); err != nil { + return &ValidationError{Name: "email", err: fmt.Errorf(`ent: validator failed for field "User.email": %w`, err)} + } + } + if _, ok := uc.mutation.Password(); !ok { + return &ValidationError{Name: "password", err: errors.New(`ent: missing required field "User.password"`)} + } + if v, ok := uc.mutation.Password(); ok { + if err := user.PasswordValidator(v); err != nil { + return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} + } + } + if _, ok := uc.mutation.IsSuperuser(); !ok { + return 
&ValidationError{Name: "is_superuser", err: errors.New(`ent: missing required field "User.is_superuser"`)} + } + return nil +} + +func (uc *UserCreate) sqlSave(ctx context.Context) (*User, error) { + _node, _spec := uc.createSpec() + if err := sqlgraph.CreateNode(ctx, uc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (uc *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) { + var ( + _node = &User{config: uc.config} + _spec = &sqlgraph.CreateSpec{ + Table: user.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + } + ) + if id, ok := uc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := uc.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldName, + }) + _node.Name = value + } + if value, ok := uc.mutation.Email(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldEmail, + }) + _node.Email = value + } + if value, ok := uc.mutation.Password(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldPassword, + }) + _node.Password = value + } + if value, ok := uc.mutation.IsSuperuser(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: user.FieldIsSuperuser, + }) + _node.IsSuperuser = value + } + if nodes := uc.mutation.AuthTokensIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: 
false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// UserCreateBulk is the builder for creating many User entities in bulk. +type UserCreateBulk struct { + config + builders []*UserCreate +} + +// Save creates the User entities in the database. +func (ucb *UserCreateBulk) Save(ctx context.Context) ([]*User, error) { + specs := make([]*sqlgraph.CreateSpec, len(ucb.builders)) + nodes := make([]*User, len(ucb.builders)) + mutators := make([]Mutator, len(ucb.builders)) + for i := range ucb.builders { + func(i int, root context.Context) { + builder := ucb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, ucb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. 
+ if err = sqlgraph.BatchCreate(ctx, ucb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, ucb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (ucb *UserCreateBulk) SaveX(ctx context.Context) []*User { + v, err := ucb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (ucb *UserCreateBulk) Exec(ctx context.Context) error { + _, err := ucb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ucb *UserCreateBulk) ExecX(ctx context.Context) { + if err := ucb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/user_delete.go b/backend/ent/user_delete.go new file mode 100644 index 0000000..6c5aafc --- /dev/null +++ b/backend/ent/user_delete.go @@ -0,0 +1,111 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserDelete is the builder for deleting a User entity. +type UserDelete struct { + config + hooks []Hook + mutation *UserMutation +} + +// Where appends a list predicates to the UserDelete builder. +func (ud *UserDelete) Where(ps ...predicate.User) *UserDelete { + ud.mutation.Where(ps...) + return ud +} + +// Exec executes the deletion query and returns how many vertices were deleted. 
+func (ud *UserDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(ud.hooks) == 0 { + affected, err = ud.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + ud.mutation = mutation + affected, err = ud.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(ud.hooks) - 1; i >= 0; i-- { + if ud.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ud.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, ud.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ud *UserDelete) ExecX(ctx context.Context) int { + n, err := ud.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (ud *UserDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + if ps := ud.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return sqlgraph.DeleteNodes(ctx, ud.driver, _spec) +} + +// UserDeleteOne is the builder for deleting a single User entity. +type UserDeleteOne struct { + ud *UserDelete +} + +// Exec executes the deletion query. +func (udo *UserDeleteOne) Exec(ctx context.Context) error { + n, err := udo.ud.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{user.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (udo *UserDeleteOne) ExecX(ctx context.Context) { + udo.ud.ExecX(ctx) +} diff --git a/backend/ent/user_query.go b/backend/ent/user_query.go new file mode 100644 index 0000000..804688d --- /dev/null +++ b/backend/ent/user_query.go @@ -0,0 +1,993 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "database/sql/driver" + "errors" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserQuery is the builder for querying User entities. +type UserQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.User + // eager-loading edges. + withAuthTokens *AuthTokensQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the UserQuery builder. +func (uq *UserQuery) Where(ps ...predicate.User) *UserQuery { + uq.predicates = append(uq.predicates, ps...) + return uq +} + +// Limit adds a limit step to the query. +func (uq *UserQuery) Limit(limit int) *UserQuery { + uq.limit = &limit + return uq +} + +// Offset adds an offset step to the query. +func (uq *UserQuery) Offset(offset int) *UserQuery { + uq.offset = &offset + return uq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (uq *UserQuery) Unique(unique bool) *UserQuery { + uq.unique = &unique + return uq +} + +// Order adds an order step to the query. +func (uq *UserQuery) Order(o ...OrderFunc) *UserQuery { + uq.order = append(uq.order, o...) 
+ return uq +} + +// QueryAuthTokens chains the current query on the "auth_tokens" edge. +func (uq *UserQuery) QueryAuthTokens() *AuthTokensQuery { + query := &AuthTokensQuery{config: uq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := uq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, selector), + sqlgraph.To(authtokens.Table, authtokens.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.AuthTokensTable, user.AuthTokensColumn), + ) + fromU = sqlgraph.SetNeighbors(uq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first User entity from the query. +// Returns a *NotFoundError when no User was found. +func (uq *UserQuery) First(ctx context.Context) (*User, error) { + nodes, err := uq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{user.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (uq *UserQuery) FirstX(ctx context.Context) *User { + node, err := uq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first User ID from the query. +// Returns a *NotFoundError when no User ID was found. +func (uq *UserQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = uq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{user.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. 
+func (uq *UserQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := uq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single User entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when exactly one User entity is not found. +// Returns a *NotFoundError when no User entities are found. +func (uq *UserQuery) Only(ctx context.Context) (*User, error) { + nodes, err := uq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{user.Label} + default: + return nil, &NotSingularError{user.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (uq *UserQuery) OnlyX(ctx context.Context) *User { + node, err := uq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only User ID in the query. +// Returns a *NotSingularError when exactly one User ID is not found. +// Returns a *NotFoundError when no entities are found. +func (uq *UserQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = uq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{user.Label} + default: + err = &NotSingularError{user.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. +func (uq *UserQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := uq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of Users. +func (uq *UserQuery) All(ctx context.Context) ([]*User, error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + return uq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. 
+func (uq *UserQuery) AllX(ctx context.Context) []*User { + nodes, err := uq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of User IDs. +func (uq *UserQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := uq.Select(user.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (uq *UserQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := uq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (uq *UserQuery) Count(ctx context.Context) (int, error) { + if err := uq.prepareQuery(ctx); err != nil { + return 0, err + } + return uq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (uq *UserQuery) CountX(ctx context.Context) int { + count, err := uq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (uq *UserQuery) Exist(ctx context.Context) (bool, error) { + if err := uq.prepareQuery(ctx); err != nil { + return false, err + } + return uq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. +func (uq *UserQuery) ExistX(ctx context.Context) bool { + exist, err := uq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the UserQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (uq *UserQuery) Clone() *UserQuery { + if uq == nil { + return nil + } + return &UserQuery{ + config: uq.config, + limit: uq.limit, + offset: uq.offset, + order: append([]OrderFunc{}, uq.order...), + predicates: append([]predicate.User{}, uq.predicates...), + withAuthTokens: uq.withAuthTokens.Clone(), + // clone intermediate query. 
+ sql: uq.sql.Clone(), + path: uq.path, + } +} + +// WithAuthTokens tells the query-builder to eager-load the nodes that are connected to +// the "auth_tokens" edge. The optional arguments are used to configure the query builder of the edge. +func (uq *UserQuery) WithAuthTokens(opts ...func(*AuthTokensQuery)) *UserQuery { + query := &AuthTokensQuery{config: uq.config} + for _, opt := range opts { + opt(query) + } + uq.withAuthTokens = query + return uq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// Name string `json:"name,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.User.Query(). +// GroupBy(user.FieldName). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +// +func (uq *UserQuery) GroupBy(field string, fields ...string) *UserGroupBy { + group := &UserGroupBy{config: uq.config} + group.fields = append([]string{field}, fields...) + group.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + return uq.sqlQuery(ctx), nil + } + return group +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// Name string `json:"name,omitempty"` +// } +// +// client.User.Query(). +// Select(user.FieldName). +// Scan(ctx, &v) +// +func (uq *UserQuery) Select(fields ...string) *UserSelect { + uq.fields = append(uq.fields, fields...) 
+ return &UserSelect{UserQuery: uq} +} + +func (uq *UserQuery) prepareQuery(ctx context.Context) error { + for _, f := range uq.fields { + if !user.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if uq.path != nil { + prev, err := uq.path(ctx) + if err != nil { + return err + } + uq.sql = prev + } + return nil +} + +func (uq *UserQuery) sqlAll(ctx context.Context) ([]*User, error) { + var ( + nodes = []*User{} + _spec = uq.querySpec() + loadedTypes = [1]bool{ + uq.withAuthTokens != nil, + } + ) + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + node := &User{config: uq.config} + nodes = append(nodes, node) + return node.scanValues(columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + if len(nodes) == 0 { + return fmt.Errorf("ent: Assign called without calling ScanValues") + } + node := nodes[len(nodes)-1] + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + if err := sqlgraph.QueryNodes(ctx, uq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + + if query := uq.withAuthTokens; query != nil { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*User) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + nodes[i].Edges.AuthTokens = []*AuthTokens{} + } + query.withFKs = true + query.Where(predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.InValues(user.AuthTokensColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return nil, err + } + for _, n := range neighbors { + fk := n.user_auth_tokens + if fk == nil { + return nil, fmt.Errorf(`foreign-key "user_auth_tokens" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return nil, fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v for node %v`, *fk, n.ID) + } + node.Edges.AuthTokens = 
append(node.Edges.AuthTokens, n) + } + } + + return nodes, nil +} + +func (uq *UserQuery) sqlCount(ctx context.Context) (int, error) { + _spec := uq.querySpec() + _spec.Node.Columns = uq.fields + if len(uq.fields) > 0 { + _spec.Unique = uq.unique != nil && *uq.unique + } + return sqlgraph.CountNodes(ctx, uq.driver, _spec) +} + +func (uq *UserQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := uq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (uq *UserQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + Columns: user.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + From: uq.sql, + Unique: true, + } + if unique := uq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := uq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, user.FieldID) + for i := range fields { + if fields[i] != user.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := uq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := uq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := uq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := uq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (uq *UserQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(uq.driver.Dialect()) + t1 := builder.Table(user.Table) + columns := uq.fields + if len(columns) == 0 { + columns = user.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if uq.sql != nil { + selector = uq.sql + selector.Select(selector.Columns(columns...)...) 
+ } + if uq.unique != nil && *uq.unique { + selector.Distinct() + } + for _, p := range uq.predicates { + p(selector) + } + for _, p := range uq.order { + p(selector) + } + if offset := uq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := uq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// UserGroupBy is the group-by builder for User entities. +type UserGroupBy struct { + config + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (ugb *UserGroupBy) Aggregate(fns ...AggregateFunc) *UserGroupBy { + ugb.fns = append(ugb.fns, fns...) + return ugb +} + +// Scan applies the group-by query and scans the result into the given value. +func (ugb *UserGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := ugb.path(ctx) + if err != nil { + return err + } + ugb.sql = query + return ugb.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (ugb *UserGroupBy) ScanX(ctx context.Context, v interface{}) { + if err := ugb.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Strings(ctx context.Context) ([]string, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Strings is not achievable when grouping more than 1 field") + } + var v []string + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. 
+func (ugb *UserGroupBy) StringsX(ctx context.Context) []string { + v, err := ugb.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = ugb.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (ugb *UserGroupBy) StringX(ctx context.Context) string { + v, err := ugb.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Ints(ctx context.Context) ([]int, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Ints is not achievable when grouping more than 1 field") + } + var v []int + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (ugb *UserGroupBy) IntsX(ctx context.Context) []int { + v, err := ugb.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = ugb.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. 
+func (ugb *UserGroupBy) IntX(ctx context.Context) int { + v, err := ugb.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Float64s(ctx context.Context) ([]float64, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Float64s is not achievable when grouping more than 1 field") + } + var v []float64 + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (ugb *UserGroupBy) Float64sX(ctx context.Context) []float64 { + v, err := ugb.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = ugb.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (ugb *UserGroupBy) Float64X(ctx context.Context) float64 { + v, err := ugb.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Bools(ctx context.Context) ([]bool, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Bools is not achievable when grouping more than 1 field") + } + var v []bool + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. 
+func (ugb *UserGroupBy) BoolsX(ctx context.Context) []bool { + v, err := ugb.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = ugb.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (ugb *UserGroupBy) BoolX(ctx context.Context) bool { + v, err := ugb.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (ugb *UserGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range ugb.fields { + if !user.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := ugb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := ugb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (ugb *UserGroupBy) sqlQuery() *sql.Selector { + selector := ugb.sql.Select() + aggregation := make([]string, 0, len(ugb.fns)) + for _, fn := range ugb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(ugb.fields)+len(ugb.fns)) + for _, f := range ugb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) 
+ } + return selector.GroupBy(selector.Columns(ugb.fields...)...) +} + +// UserSelect is the builder for selecting fields of User entities. +type UserSelect struct { + *UserQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. +func (us *UserSelect) Scan(ctx context.Context, v interface{}) error { + if err := us.prepareQuery(ctx); err != nil { + return err + } + us.sql = us.UserQuery.sqlQuery(ctx) + return us.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (us *UserSelect) ScanX(ctx context.Context, v interface{}) { + if err := us.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Strings(ctx context.Context) ([]string, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Strings is not achievable when selecting more than 1 field") + } + var v []string + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (us *UserSelect) StringsX(ctx context.Context) []string { + v, err := us.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a selector. It is only allowed when selecting one field. +func (us *UserSelect) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = us.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. 
+func (us *UserSelect) StringX(ctx context.Context) string { + v, err := us.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Ints(ctx context.Context) ([]int, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Ints is not achievable when selecting more than 1 field") + } + var v []int + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (us *UserSelect) IntsX(ctx context.Context) []int { + v, err := us.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = us.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (us *UserSelect) IntX(ctx context.Context) int { + v, err := us.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Float64s(ctx context.Context) ([]float64, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Float64s is not achievable when selecting more than 1 field") + } + var v []float64 + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. 
+func (us *UserSelect) Float64sX(ctx context.Context) []float64 { + v, err := us.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = us.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (us *UserSelect) Float64X(ctx context.Context) float64 { + v, err := us.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Bools(ctx context.Context) ([]bool, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Bools is not achievable when selecting more than 1 field") + } + var v []bool + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. +func (us *UserSelect) BoolsX(ctx context.Context) []bool { + v, err := us.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = us.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. 
+func (us *UserSelect) BoolX(ctx context.Context) bool { + v, err := us.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (us *UserSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := us.sql.Query() + if err := us.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/user_update.go b/backend/ent/user_update.go new file mode 100644 index 0000000..d532fc5 --- /dev/null +++ b/backend/ent/user_update.go @@ -0,0 +1,592 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserUpdate is the builder for updating User entities. +type UserUpdate struct { + config + hooks []Hook + mutation *UserMutation +} + +// Where appends a list predicates to the UserUpdate builder. +func (uu *UserUpdate) Where(ps ...predicate.User) *UserUpdate { + uu.mutation.Where(ps...) + return uu +} + +// SetName sets the "name" field. +func (uu *UserUpdate) SetName(s string) *UserUpdate { + uu.mutation.SetName(s) + return uu +} + +// SetEmail sets the "email" field. +func (uu *UserUpdate) SetEmail(s string) *UserUpdate { + uu.mutation.SetEmail(s) + return uu +} + +// SetPassword sets the "password" field. +func (uu *UserUpdate) SetPassword(s string) *UserUpdate { + uu.mutation.SetPassword(s) + return uu +} + +// SetIsSuperuser sets the "is_superuser" field. +func (uu *UserUpdate) SetIsSuperuser(b bool) *UserUpdate { + uu.mutation.SetIsSuperuser(b) + return uu +} + +// SetNillableIsSuperuser sets the "is_superuser" field if the given value is not nil. 
+func (uu *UserUpdate) SetNillableIsSuperuser(b *bool) *UserUpdate { + if b != nil { + uu.SetIsSuperuser(*b) + } + return uu +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. +func (uu *UserUpdate) AddAuthTokenIDs(ids ...int) *UserUpdate { + uu.mutation.AddAuthTokenIDs(ids...) + return uu +} + +// AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. +func (uu *UserUpdate) AddAuthTokens(a ...*AuthTokens) *UserUpdate { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uu.AddAuthTokenIDs(ids...) +} + +// Mutation returns the UserMutation object of the builder. +func (uu *UserUpdate) Mutation() *UserMutation { + return uu.mutation +} + +// ClearAuthTokens clears all "auth_tokens" edges to the AuthTokens entity. +func (uu *UserUpdate) ClearAuthTokens() *UserUpdate { + uu.mutation.ClearAuthTokens() + return uu +} + +// RemoveAuthTokenIDs removes the "auth_tokens" edge to AuthTokens entities by IDs. +func (uu *UserUpdate) RemoveAuthTokenIDs(ids ...int) *UserUpdate { + uu.mutation.RemoveAuthTokenIDs(ids...) + return uu +} + +// RemoveAuthTokens removes "auth_tokens" edges to AuthTokens entities. +func (uu *UserUpdate) RemoveAuthTokens(a ...*AuthTokens) *UserUpdate { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uu.RemoveAuthTokenIDs(ids...) +} + +// Save executes the query and returns the number of nodes affected by the update operation. 
+func (uu *UserUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(uu.hooks) == 0 { + if err = uu.check(); err != nil { + return 0, err + } + affected, err = uu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = uu.check(); err != nil { + return 0, err + } + uu.mutation = mutation + affected, err = uu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(uu.hooks) - 1; i >= 0; i-- { + if uu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = uu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, uu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (uu *UserUpdate) SaveX(ctx context.Context) int { + affected, err := uu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (uu *UserUpdate) Exec(ctx context.Context) error { + _, err := uu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (uu *UserUpdate) ExecX(ctx context.Context) { + if err := uu.Exec(ctx); err != nil { + panic(err) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (uu *UserUpdate) check() error { + if v, ok := uu.mutation.Name(); ok { + if err := user.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "User.name": %w`, err)} + } + } + if v, ok := uu.mutation.Email(); ok { + if err := user.EmailValidator(v); err != nil { + return &ValidationError{Name: "email", err: fmt.Errorf(`ent: validator failed for field "User.email": %w`, err)} + } + } + if v, ok := uu.mutation.Password(); ok { + if err := user.PasswordValidator(v); err != nil { + return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} + } + } + return nil +} + +func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + Columns: user.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + if ps := uu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := uu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldName, + }) + } + if value, ok := uu.mutation.Email(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldEmail, + }) + } + if value, ok := uu.mutation.Password(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldPassword, + }) + } + if value, ok := uu.mutation.IsSuperuser(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: user.FieldIsSuperuser, + }) + } + if uu.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, 
+ Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uu.mutation.RemovedAuthTokensIDs(); len(nodes) > 0 && !uu.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uu.mutation.AuthTokensIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, uu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{user.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return 0, err + } + return n, nil +} + +// UserUpdateOne is the builder for updating a single User entity. +type UserUpdateOne struct { + config + fields []string + hooks []Hook + mutation *UserMutation +} + +// SetName sets the "name" field. +func (uuo *UserUpdateOne) SetName(s string) *UserUpdateOne { + uuo.mutation.SetName(s) + return uuo +} + +// SetEmail sets the "email" field. 
+func (uuo *UserUpdateOne) SetEmail(s string) *UserUpdateOne { + uuo.mutation.SetEmail(s) + return uuo +} + +// SetPassword sets the "password" field. +func (uuo *UserUpdateOne) SetPassword(s string) *UserUpdateOne { + uuo.mutation.SetPassword(s) + return uuo +} + +// SetIsSuperuser sets the "is_superuser" field. +func (uuo *UserUpdateOne) SetIsSuperuser(b bool) *UserUpdateOne { + uuo.mutation.SetIsSuperuser(b) + return uuo +} + +// SetNillableIsSuperuser sets the "is_superuser" field if the given value is not nil. +func (uuo *UserUpdateOne) SetNillableIsSuperuser(b *bool) *UserUpdateOne { + if b != nil { + uuo.SetIsSuperuser(*b) + } + return uuo +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. +func (uuo *UserUpdateOne) AddAuthTokenIDs(ids ...int) *UserUpdateOne { + uuo.mutation.AddAuthTokenIDs(ids...) + return uuo +} + +// AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. +func (uuo *UserUpdateOne) AddAuthTokens(a ...*AuthTokens) *UserUpdateOne { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uuo.AddAuthTokenIDs(ids...) +} + +// Mutation returns the UserMutation object of the builder. +func (uuo *UserUpdateOne) Mutation() *UserMutation { + return uuo.mutation +} + +// ClearAuthTokens clears all "auth_tokens" edges to the AuthTokens entity. +func (uuo *UserUpdateOne) ClearAuthTokens() *UserUpdateOne { + uuo.mutation.ClearAuthTokens() + return uuo +} + +// RemoveAuthTokenIDs removes the "auth_tokens" edge to AuthTokens entities by IDs. +func (uuo *UserUpdateOne) RemoveAuthTokenIDs(ids ...int) *UserUpdateOne { + uuo.mutation.RemoveAuthTokenIDs(ids...) + return uuo +} + +// RemoveAuthTokens removes "auth_tokens" edges to AuthTokens entities. +func (uuo *UserUpdateOne) RemoveAuthTokens(a ...*AuthTokens) *UserUpdateOne { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uuo.RemoveAuthTokenIDs(ids...) 
+} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (uuo *UserUpdateOne) Select(field string, fields ...string) *UserUpdateOne { + uuo.fields = append([]string{field}, fields...) + return uuo +} + +// Save executes the query and returns the updated User entity. +func (uuo *UserUpdateOne) Save(ctx context.Context) (*User, error) { + var ( + err error + node *User + ) + if len(uuo.hooks) == 0 { + if err = uuo.check(); err != nil { + return nil, err + } + node, err = uuo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = uuo.check(); err != nil { + return nil, err + } + uuo.mutation = mutation + node, err = uuo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(uuo.hooks) - 1; i >= 0; i-- { + if uuo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = uuo.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, uuo.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. +func (uuo *UserUpdateOne) SaveX(ctx context.Context) *User { + node, err := uuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (uuo *UserUpdateOne) Exec(ctx context.Context) error { + _, err := uuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (uuo *UserUpdateOne) ExecX(ctx context.Context) { + if err := uuo.Exec(ctx); err != nil { + panic(err) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (uuo *UserUpdateOne) check() error { + if v, ok := uuo.mutation.Name(); ok { + if err := user.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "User.name": %w`, err)} + } + } + if v, ok := uuo.mutation.Email(); ok { + if err := user.EmailValidator(v); err != nil { + return &ValidationError{Name: "email", err: fmt.Errorf(`ent: validator failed for field "User.email": %w`, err)} + } + } + if v, ok := uuo.mutation.Password(); ok { + if err := user.PasswordValidator(v); err != nil { + return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} + } + } + return nil +} + +func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + Columns: user.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + id, ok := uuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "User.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := uuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, user.FieldID) + for _, f := range fields { + if !user.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != user.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := uuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := uuo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldName, + }) + } + if value, ok := uuo.mutation.Email(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, 
&sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldEmail, + }) + } + if value, ok := uuo.mutation.Password(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldPassword, + }) + } + if value, ok := uuo.mutation.IsSuperuser(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: user.FieldIsSuperuser, + }) + } + if uuo.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uuo.mutation.RemovedAuthTokensIDs(); len(nodes) > 0 && !uuo.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uuo.mutation.AuthTokensIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &User{config: uuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = 
_node.scanValues + if err = sqlgraph.UpdateNode(ctx, uuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{user.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/go.mod b/backend/go.mod new file mode 100644 index 0000000..70ff971 --- /dev/null +++ b/backend/go.mod @@ -0,0 +1,49 @@ +module github.com/hay-kot/git-web-template/backend + +go 1.18 + +require ( + entgo.io/ent v0.10.0 + github.com/ardanlabs/conf/v2 v2.2.0 + github.com/go-chi/chi/v5 v5.0.7 + github.com/google/uuid v1.3.0 + github.com/mattn/go-sqlite3 v1.14.10 + github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 + github.com/swaggo/http-swagger v1.3.0 + github.com/swaggo/swag v1.8.3 + github.com/tkrajina/typescriptify-golang-structs v0.1.7 + github.com/urfave/cli/v2 v2.3.0 + golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 +) + +require ( + ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd // indirect + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/agext/levenshtein v1.2.1 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-openapi/inflect v0.19.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.20.0 // indirect + github.com/go-openapi/spec v0.20.6 // indirect + github.com/go-openapi/swag v0.21.1 // indirect + github.com/google/go-cmp v0.5.6 // indirect + github.com/hashicorp/hcl/v2 v2.10.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/kr/pretty v0.2.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/russross/blackfriday/v2 
v2.1.0 // indirect + github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe // indirect + github.com/tkrajina/go-reflector v0.5.5 // indirect + github.com/zclconf/go-cty v1.8.0 // indirect + golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect + golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 // indirect + golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e // indirect + golang.org/x/text v0.3.7 // indirect + golang.org/x/tools v0.1.11 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect +) diff --git a/backend/go.sum b/backend/go.sum new file mode 100644 index 0000000..118c2e5 --- /dev/null +++ b/backend/go.sum @@ -0,0 +1,154 @@ +ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd h1:YxnJl3ySvwQ3C7Rspa4CrQtwrftTZ0F8WJ36CvY7nWE= +ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd/go.mod h1:XcLUpQX7Cq4qtagEHIleq3MJaBeeJ76BS8doc4gkOJk= +entgo.io/ent v0.10.0 h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo= +entgo.io/ent v0.10.0/go.mod h1:5bjIYdTizykmdtPY3knXrrGpxAh0cMjFfxdNnlNiUGU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod 
h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/ardanlabs/conf/v2 v2.2.0 h1:ar1+TYIYAh2Tdeg2DQroh7ruR56/vJR8BDfzDIrXgtk= +github.com/ardanlabs/conf/v2 v2.2.0/go.mod h1:m37ZKdW9jwMUEhGX36jRNt8VzSQ/HVmSziLZH2p33nY= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8= +github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= +github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= +github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= +github.com/go-openapi/spec v0.20.6 h1:ich1RQ3WDbfoeTqTAb+5EIxNmpKVJZWBNah9RAT0jIQ= +github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= 
+github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU= +github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/hcl/v2 v2.10.0 h1:1S1UnuhDGlv3gRFV4+0EdwB+znNP5HmcGbIqwnSCByg= +github.com/hashicorp/hcl/v2 v2.10.0/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod 
h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-sqlite3 v1.14.10 h1:MLn+5bFRlWMGoSRmJour3CL1w/qL96mvipqpwQW/Sfk= +github.com/mattn/go-sqlite3 v1.14.10/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= 
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU= +github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe h1:K8pHPVoTgxFJt1lXuIzzOX7zZhZFldJQK/CgKx9BFIc= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/http-swagger v1.3.0 h1:1+6M4qRorIbdyTWTsGrwnb0r9jGK5dcWN82O6oY/yHQ= +github.com/swaggo/http-swagger v1.3.0/go.mod h1:9glekdg40lwclrrKNRGgj/IMDxpNPZ3kzab4oPcF8EM= +github.com/swaggo/swag v1.8.3 h1:3pZSSCQ//gAH88lfmxM3Cd1+JCsxV8Md6f36b9hrZ5s= +github.com/swaggo/swag v1.8.3/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg= +github.com/tkrajina/go-reflector v0.5.5 h1:gwoQFNye30Kk7NrExj8zm3zFtrGPqOkzFMLuQZg1DtQ= +github.com/tkrajina/go-reflector v0.5.5/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= +github.com/tkrajina/typescriptify-golang-structs v0.1.7 h1:72jmiT/brlgtCPpwu4X0HkhMeUMtx8+xDiTMS93rFqY= +github.com/tkrajina/typescriptify-golang-structs v0.1.7/go.mod h1:sjU00nti/PMEOZb07KljFlR+lJ+RotsC0GBQMv9EKls= +github.com/urfave/cli/v2 v2.3.0 
h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= +github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= +github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 h1:8NSylCMxLW4JvserAndSgFL7aPli6A68yf0bYFTcWCM= +golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e h1:CsOuNlbOuf0mzxJIefr6Q4uAUetRUwZE4qt7VfzP+xo= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.1.11 h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY= +golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= +golang.org/x/xerrors 
v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/backend/internal/config/conf.go b/backend/internal/config/conf.go new file mode 100644 index 0000000..cf90938 --- /dev/null +++ b/backend/internal/config/conf.go @@ -0,0 +1,81 @@ +package config + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + + "github.com/ardanlabs/conf/v2" + "github.com/ardanlabs/conf/v2/yaml" + + "os" +) + +const ( + ModeDevelopment = "development" + ModeProduction = "production" +) + +type Config 
struct { + Mode string `yaml:"mode" conf:"default:development"` // development or production + Web WebConfig `yaml:"web"` + Database Database `yaml:"database"` + Log LoggerConf `yaml:"logger"` + Mailer MailerConf `yaml:"mailer"` + Seed Seed `yaml:"seed"` + Swagger SwaggerConf `yaml:"swagger"` +} + +type SwaggerConf struct { + Host string `yaml:"host" conf:"default:localhost:7745"` + Scheme string `yaml:"scheme" conf:"default:http"` +} + +type WebConfig struct { + Port string `yaml:"port" conf:"default:7745"` + Host string `yaml:"host" conf:"default:127.0.0.1"` +} + +// NewConfig parses the CLI/Config file and returns a Config struct. If the file argument is an empty string, the +// file is not read. If the file is not empty, the file is read and the Config struct is returned. +func NewConfig(file string) (*Config, error) { + var cfg Config + + const prefix = "API" + + help, err := func() (string, error) { + if _, err := os.Stat(file); errors.Is(err, os.ErrNotExist) { + return conf.Parse(prefix, &cfg) + } else { + yamlData, err := ioutil.ReadFile(file) + if err != nil { + return "", err + } + return conf.Parse(prefix, &cfg, yaml.WithData(yamlData)) + } + }() + + if err != nil { + if errors.Is(err, conf.ErrHelpWanted) { + fmt.Println(help) + os.Exit(0) + } + return &cfg, fmt.Errorf("parsing config: %w", err) + } + + return &cfg, nil +} + +// Print prints the configuration to stdout as a json indented string +// This is useful for debugging. If the marshaller errors out, it will panic. 
// Supported database driver identifiers.
const (
	DriverSqlite3  = "sqlite3"
	DriverPostgres = "postgres"
)

// Database holds the driver selection plus one connection URL per
// supported driver; only the URL matching Driver is used.
type Database struct {
	Driver      string `yaml:"driver" conf:"default:sqlite3"`
	SqliteUrl   string `yaml:"sqlite-url" conf:"default:file:ent?mode=memory&cache=shared&_fk=1"`
	PostgresUrl string `yaml:"postgres-url" conf:""`
}

// GetDriver reports the configured driver name.
func (d *Database) GetDriver() string {
	return d.Driver
}

// GetUrl returns the connection URL for the configured driver.
// It panics on an unrecognized driver — callers (and the tests)
// rely on this fail-fast behavior for misconfiguration.
func (d *Database) GetUrl() string {
	if d.Driver == DriverSqlite3 {
		return d.SqliteUrl
	}
	if d.Driver == DriverPostgres {
		return d.PostgresUrl
	}
	panic("unknown database driver")
}
// MailerConf holds SMTP connection and sender settings.
type MailerConf struct {
	Host     string `conf:""`
	Port     int    `conf:""`
	Username string `conf:""`
	Password string `conf:""`
	From     string `conf:""`
}

// Ready reports whether every mailer field has been populated;
// a zero-valued (default) configuration is not ready.
func (mc *MailerConf) Ready() bool {
	if mc.Port == 0 {
		return false
	}
	for _, field := range []string{mc.Host, mc.Username, mc.Password, mc.From} {
		if field == "" {
			return false
		}
	}
	return true
}
0000000..e076593 --- /dev/null +++ b/backend/internal/config/conf_seed.go @@ -0,0 +1,13 @@ +package config + +type SeedUser struct { + Name string `yaml:"name"` + Email string `yaml:"email"` + Password string `yaml:"password"` + IsSuperuser bool `yaml:"isSuperuser"` +} + +type Seed struct { + Enabled bool `yaml:"enabled" conf:"default:false"` + Users []SeedUser `yaml:"users"` +} diff --git a/backend/internal/mapper/users_automapper.go b/backend/internal/mapper/users_automapper.go new file mode 100644 index 0000000..4f97e09 --- /dev/null +++ b/backend/internal/mapper/users_automapper.go @@ -0,0 +1,27 @@ +// Code generated by "/pkgs/automapper"; DO NOT EDIT. +package mapper + +import ( + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +func UserOutFromModel(from ent.User) types.UserOut { + return types.UserOut{ + ID: from.ID, + Name: from.Name, + Email: from.Email, + Password: from.Password, + IsSuperuser: from.IsSuperuser, + } +} + +func UserOutToModel(from types.UserOut) ent.User { + return ent.User{ + ID: from.ID, + Name: from.Name, + Email: from.Email, + Password: from.Password, + IsSuperuser: from.IsSuperuser, + } +} diff --git a/backend/internal/mocks/chimocker/chimocker.go b/backend/internal/mocks/chimocker/chimocker.go new file mode 100644 index 0000000..b918403 --- /dev/null +++ b/backend/internal/mocks/chimocker/chimocker.go @@ -0,0 +1,30 @@ +package chimocker + +import ( + "context" + "net/http" + + "github.com/go-chi/chi/v5" +) + +type Params map[string]string + +// WithUrlParam returns a pointer to a request object with the given URL params +// added to a new chi.Context object. 
+func WithUrlParam(r *http.Request, key, value string) *http.Request { + chiCtx := chi.NewRouteContext() + req := r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, chiCtx)) + chiCtx.URLParams.Add(key, value) + return req +} + +// WithUrlParams returns a pointer to a request object with the given URL params +// added to a new chi.Context object. for single param assignment see WithUrlParam +func WithUrlParams(r *http.Request, params Params) *http.Request { + chiCtx := chi.NewRouteContext() + req := r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, chiCtx)) + for key, value := range params { + chiCtx.URLParams.Add(key, value) + } + return req +} diff --git a/backend/internal/mocks/factories/users.go b/backend/internal/mocks/factories/users.go new file mode 100644 index 0000000..1642a7a --- /dev/null +++ b/backend/internal/mocks/factories/users.go @@ -0,0 +1,16 @@ +package factories + +import ( + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/faker" +) + +func UserFactory() types.UserCreate { + f := faker.NewFaker() + return types.UserCreate{ + Name: f.RandomString(10), + Email: f.RandomEmail(), + Password: f.RandomString(10), + IsSuperuser: f.RandomBool(), + } +} diff --git a/backend/internal/mocks/mock_logger.go b/backend/internal/mocks/mock_logger.go new file mode 100644 index 0000000..d367161 --- /dev/null +++ b/backend/internal/mocks/mock_logger.go @@ -0,0 +1,11 @@ +package mocks + +import ( + "os" + + "github.com/hay-kot/git-web-template/backend/pkgs/logger" +) + +func GetStructLogger() *logger.Logger { + return logger.New(os.Stdout, logger.LevelDebug) +} diff --git a/backend/internal/mocks/mocker_services.go b/backend/internal/mocks/mocker_services.go new file mode 100644 index 0000000..3011fb1 --- /dev/null +++ b/backend/internal/mocks/mocker_services.go @@ -0,0 +1,10 @@ +package mocks + +import ( + "github.com/hay-kot/git-web-template/backend/internal/repo" + 
"github.com/hay-kot/git-web-template/backend/internal/services" +) + +func GetMockServices(repos *repo.AllRepos) *services.AllServices { + return services.NewServices(repos) +} diff --git a/backend/internal/mocks/mocks_ent_repo.go b/backend/internal/mocks/mocks_ent_repo.go new file mode 100644 index 0000000..9273502 --- /dev/null +++ b/backend/internal/mocks/mocks_ent_repo.go @@ -0,0 +1,22 @@ +package mocks + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/repo" + _ "github.com/mattn/go-sqlite3" +) + +func GetEntRepos() (*repo.AllRepos, func() error) { + c, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1") + if err != nil { + panic(err) + } + + if err := c.Schema.Create(context.Background()); err != nil { + panic(err) + } + + return repo.EntAllRepos(c), c.Close +} diff --git a/backend/internal/repo/main_test.go b/backend/internal/repo/main_test.go new file mode 100644 index 0000000..f516bc5 --- /dev/null +++ b/backend/internal/repo/main_test.go @@ -0,0 +1,38 @@ +package repo + +import ( + "context" + "log" + "math/rand" + "os" + "testing" + "time" + + "github.com/hay-kot/git-web-template/backend/ent" + _ "github.com/mattn/go-sqlite3" +) + +var testEntClient *ent.Client +var testRepos *AllRepos + +func TestMain(m *testing.M) { + rand.Seed(int64(time.Now().Unix())) + + client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1") + if err != nil { + log.Fatalf("failed opening connection to sqlite: %v", err) + } + + if err := client.Schema.Create(context.Background()); err != nil { + log.Fatalf("failed creating schema resources: %v", err) + } + + testEntClient = client + testRepos = EntAllRepos(testEntClient) + + defer client.Close() + + m.Run() + + os.Exit(m.Run()) +} diff --git a/backend/internal/repo/repos_all.go b/backend/internal/repo/repos_all.go new file mode 100644 index 0000000..faf50ee --- /dev/null +++ b/backend/internal/repo/repos_all.go 
@@ -0,0 +1,16 @@ +package repo + +import "github.com/hay-kot/git-web-template/backend/ent" + +// AllRepos is a container for all the repository interfaces +type AllRepos struct { + Users UserRepository + AuthTokens TokenRepository +} + +func EntAllRepos(db *ent.Client) *AllRepos { + return &AllRepos{ + Users: &EntUserRepository{db}, + AuthTokens: &EntTokenRepository{db}, + } +} diff --git a/backend/internal/repo/token_ent.go b/backend/internal/repo/token_ent.go new file mode 100644 index 0000000..f96ed7c --- /dev/null +++ b/backend/internal/repo/token_ent.go @@ -0,0 +1,74 @@ +package repo + +import ( + "context" + "time" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/internal/mapper" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type EntTokenRepository struct { + db *ent.Client +} + +// GetUserFromToken get's a user from a token +func (r *EntTokenRepository) GetUserFromToken(ctx context.Context, token []byte) (types.UserOut, error) { + dbToken, err := r.db.AuthTokens.Query(). + Where(authtokens.Token(token)). + Where(authtokens.ExpiresAtGTE(time.Now())). + WithUser(). + Only(ctx) + + if err != nil { + return types.UserOut{}, err + } + + return mapper.UserOutFromModel(*dbToken.Edges.User), nil +} + +// Creates a token for a user +func (r *EntTokenRepository) CreateToken(ctx context.Context, createToken types.UserAuthTokenCreate) (types.UserAuthToken, error) { + tokenOut := types.UserAuthToken{} + + dbToken, err := r.db.AuthTokens.Create(). + SetToken(createToken.TokenHash). + SetUserID(createToken.UserID). + SetExpiresAt(createToken.ExpiresAt). 
+ Save(ctx) + + if err != nil { + return tokenOut, err + } + + tokenOut.TokenHash = dbToken.Token + tokenOut.UserID = createToken.UserID + tokenOut.CreatedAt = dbToken.CreatedAt + tokenOut.ExpiresAt = dbToken.ExpiresAt + + return tokenOut, nil +} + +// DeleteToken remove a single token from the database - equivalent to revoke or logout +func (r *EntTokenRepository) DeleteToken(ctx context.Context, token []byte) error { + _, err := r.db.AuthTokens.Delete().Where(authtokens.Token(token)).Exec(ctx) + return err +} + +// PurgeExpiredTokens removes all expired tokens from the database +func (r *EntTokenRepository) PurgeExpiredTokens(ctx context.Context) (int, error) { + tokensDeleted, err := r.db.AuthTokens.Delete().Where(authtokens.ExpiresAtLTE(time.Now())).Exec(ctx) + + if err != nil { + return 0, err + } + + return tokensDeleted, nil +} + +func (r *EntTokenRepository) DeleteAll(ctx context.Context) (int, error) { + amount, err := r.db.AuthTokens.Delete().Exec(ctx) + return amount, err +} diff --git a/backend/internal/repo/token_ent_test.go b/backend/internal/repo/token_ent_test.go new file mode 100644 index 0000000..ae01a5a --- /dev/null +++ b/backend/internal/repo/token_ent_test.go @@ -0,0 +1,110 @@ +package repo + +import ( + "context" + "testing" + "time" + + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/stretchr/testify/assert" +) + +func Test_EntAuthTokenRepo_CreateToken(t *testing.T) { + assert := assert.New(t) + ctx := context.Background() + + user := UserFactory() + + userOut, _ := testRepos.Users.Create(ctx, user) + + expiresAt := time.Now().Add(time.Hour) + + generatedToken := hasher.GenerateToken() + + token, err := testRepos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + TokenHash: generatedToken.Hash, + ExpiresAt: expiresAt, + UserID: userOut.ID, + }) + + assert.NoError(err) + assert.Equal(userOut.ID, token.UserID) + assert.Equal(expiresAt, token.ExpiresAt) 
+ + // Cleanup + err = testRepos.Users.Delete(ctx, userOut.ID) + _, err = testRepos.AuthTokens.DeleteAll(ctx) +} + +func Test_EntAuthTokenRepo_GetUserByToken(t *testing.T) { + assert := assert.New(t) + ctx := context.Background() + + user := UserFactory() + userOut, _ := testRepos.Users.Create(ctx, user) + + expiresAt := time.Now().Add(time.Hour) + generatedToken := hasher.GenerateToken() + + token, err := testRepos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + TokenHash: generatedToken.Hash, + ExpiresAt: expiresAt, + UserID: userOut.ID, + }) + + // Get User from token + foundUser, err := testRepos.AuthTokens.GetUserFromToken(ctx, token.TokenHash) + + assert.NoError(err) + assert.Equal(userOut.ID, foundUser.ID) + assert.Equal(userOut.Name, foundUser.Name) + assert.Equal(userOut.Email, foundUser.Email) + + // Cleanup + err = testRepos.Users.Delete(ctx, userOut.ID) + _, err = testRepos.AuthTokens.DeleteAll(ctx) +} + +func Test_EntAuthTokenRepo_PurgeExpiredTokens(t *testing.T) { + assert := assert.New(t) + ctx := context.Background() + + user := UserFactory() + userOut, _ := testRepos.Users.Create(ctx, user) + + createdTokens := []types.UserAuthToken{} + + for i := 0; i < 5; i++ { + expiresAt := time.Now() + generatedToken := hasher.GenerateToken() + + createdToken, err := testRepos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + TokenHash: generatedToken.Hash, + ExpiresAt: expiresAt, + UserID: userOut.ID, + }) + + assert.NoError(err) + assert.NotNil(createdToken) + + createdTokens = append(createdTokens, createdToken) + + } + + // Purge expired tokens + tokensDeleted, err := testRepos.AuthTokens.PurgeExpiredTokens(ctx) + + assert.NoError(err) + assert.Equal(5, tokensDeleted) + + // Check if tokens are deleted + for _, token := range createdTokens { + _, err := testRepos.AuthTokens.GetUserFromToken(ctx, token.TokenHash) + assert.Error(err) + } + + // Cleanup + err = testRepos.Users.Delete(ctx, userOut.ID) + _, err = 
testRepos.AuthTokens.DeleteAll(ctx) +} diff --git a/backend/internal/repo/token_interface.go b/backend/internal/repo/token_interface.go new file mode 100644 index 0000000..4396063 --- /dev/null +++ b/backend/internal/repo/token_interface.go @@ -0,0 +1,20 @@ +package repo + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type TokenRepository interface { + // GetUserFromToken get's a user from a token + GetUserFromToken(ctx context.Context, token []byte) (types.UserOut, error) + // Creates a token for a user + CreateToken(ctx context.Context, createToken types.UserAuthTokenCreate) (types.UserAuthToken, error) + // DeleteToken remove a single token from the database - equivalent to revoke or logout + DeleteToken(ctx context.Context, token []byte) error + // PurgeExpiredTokens removes all expired tokens from the database + PurgeExpiredTokens(ctx context.Context) (int, error) + // DeleteAll removes all tokens from the database + DeleteAll(ctx context.Context) (int, error) +} diff --git a/backend/internal/repo/users_ent.go b/backend/internal/repo/users_ent.go new file mode 100644 index 0000000..0131ea7 --- /dev/null +++ b/backend/internal/repo/users_ent.go @@ -0,0 +1,141 @@ +package repo + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type EntUserRepository struct { + db *ent.Client +} + +func (e *EntUserRepository) toUserOut(usr *types.UserOut, entUsr *ent.User) { + usr.ID = entUsr.ID + usr.Password = entUsr.Password + usr.Name = entUsr.Name + usr.Email = entUsr.Email + usr.IsSuperuser = entUsr.IsSuperuser +} + +func (e *EntUserRepository) GetOneId(ctx context.Context, id uuid.UUID) (types.UserOut, error) { + usr, err := e.db.User.Query().Where(user.ID(id)).Only(ctx) + + usrOut := types.UserOut{} + + if err != nil { + return usrOut, err + } + + 
e.toUserOut(&usrOut, usr) + + return usrOut, nil +} + +func (e *EntUserRepository) GetOneEmail(ctx context.Context, email string) (types.UserOut, error) { + usr, err := e.db.User.Query().Where(user.Email(email)).Only(ctx) + + usrOut := types.UserOut{} + + if err != nil { + return usrOut, err + } + + e.toUserOut(&usrOut, usr) + + return usrOut, nil +} + +func (e *EntUserRepository) GetAll(ctx context.Context) ([]types.UserOut, error) { + users, err := e.db.User.Query().All(ctx) + + if err != nil { + return nil, err + } + + var usrs []types.UserOut + + for _, usr := range users { + usrOut := types.UserOut{} + e.toUserOut(&usrOut, usr) + usrs = append(usrs, usrOut) + } + + return usrs, nil +} + +func (e *EntUserRepository) Create(ctx context.Context, usr types.UserCreate) (types.UserOut, error) { + err := usr.Validate() + usrOut := types.UserOut{} + + if err != nil { + return usrOut, err + } + + entUser, err := e.db.User. + Create(). + SetName(usr.Name). + SetEmail(usr.Email). + SetPassword(usr.Password). + SetIsSuperuser(usr.IsSuperuser). 
+ Save(ctx) + + e.toUserOut(&usrOut, entUser) + + return usrOut, err +} + +func (e *EntUserRepository) Update(ctx context.Context, ID uuid.UUID, data types.UserUpdate) error { + bldr := e.db.User.Update().Where(user.ID(ID)) + + if data.Name != nil { + bldr = bldr.SetName(*data.Name) + } + + if data.Email != nil { + bldr = bldr.SetEmail(*data.Email) + } + + // TODO: FUTURE + // if data.Password != nil { + // bldr = bldr.SetPassword(*data.Password) + // } + + // if data.IsSuperuser != nil { + // bldr = bldr.SetIsSuperuser(*data.IsSuperuser) + // } + + _, err := bldr.Save(ctx) + return err +} + +func (e *EntUserRepository) Delete(ctx context.Context, id uuid.UUID) error { + _, err := e.db.User.Delete().Where(user.ID(id)).Exec(ctx) + return err +} + +func (e *EntUserRepository) DeleteAll(ctx context.Context) error { + _, err := e.db.User.Delete().Exec(ctx) + return err +} + +func (e *EntUserRepository) GetSuperusers(ctx context.Context) ([]types.UserOut, error) { + users, err := e.db.User.Query().Where(user.IsSuperuser(true)).All(ctx) + + if err != nil { + return nil, err + } + + var usrs []types.UserOut + + for _, usr := range users { + usrOut := types.UserOut{} + e.toUserOut(&usrOut, usr) + usrs = append(usrs, usrOut) + } + + return usrs, nil +} diff --git a/backend/internal/repo/users_ent_test.go b/backend/internal/repo/users_ent_test.go new file mode 100644 index 0000000..01a228e --- /dev/null +++ b/backend/internal/repo/users_ent_test.go @@ -0,0 +1,148 @@ +package repo + +import ( + "context" + "fmt" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/stretchr/testify/assert" +) + +func UserFactory() types.UserCreate { + f := faker.NewFaker() + return types.UserCreate{ + Name: f.RandomString(10), + Email: f.RandomEmail(), + Password: f.RandomString(10), + IsSuperuser: f.RandomBool(), + } +} + +func Test_EntUserRepo_GetOneEmail(t *testing.T) { + assert := assert.New(t) 
+ user := UserFactory() + ctx := context.Background() + + testRepos.Users.Create(ctx, user) + + foundUser, err := testRepos.Users.GetOneEmail(ctx, user.Email) + + assert.NotNil(foundUser) + assert.Nil(err) + assert.Equal(user.Email, foundUser.Email) + assert.Equal(user.Name, foundUser.Name) + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} + +func Test_EntUserRepo_GetOneId(t *testing.T) { + assert := assert.New(t) + user := UserFactory() + ctx := context.Background() + + userOut, _ := testRepos.Users.Create(ctx, user) + foundUser, err := testRepos.Users.GetOneId(ctx, userOut.ID) + + assert.NotNil(foundUser) + assert.Nil(err) + assert.Equal(user.Email, foundUser.Email) + assert.Equal(user.Name, foundUser.Name) + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} + +func Test_EntUserRepo_GetAll(t *testing.T) { + // Setup + toCreate := []types.UserCreate{ + UserFactory(), + UserFactory(), + UserFactory(), + UserFactory(), + } + + ctx := context.Background() + + created := []types.UserOut{} + + for _, usr := range toCreate { + usrOut, _ := testRepos.Users.Create(ctx, usr) + created = append(created, usrOut) + } + + // Validate + allUsers, err := testRepos.Users.GetAll(ctx) + + assert.Nil(t, err) + assert.Equal(t, len(created), len(allUsers)) + + for _, usr := range created { + fmt.Printf("%+v\n", usr) + assert.Contains(t, allUsers, usr) + } + + for _, usr := range created { + testRepos.Users.Delete(ctx, usr.ID) + } + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} + +func Test_EntUserRepo_Update(t *testing.T) { + t.Skip() +} + +func Test_EntUserRepo_Delete(t *testing.T) { + // Create 10 Users + for i := 0; i < 10; i++ { + user := UserFactory() + ctx := context.Background() + _, _ = testRepos.Users.Create(ctx, user) + } + + // Delete all + ctx := context.Background() + allUsers, _ := testRepos.Users.GetAll(ctx) + + assert.Greater(t, len(allUsers), 0) + testRepos.Users.DeleteAll(ctx) + + allUsers, _ = testRepos.Users.GetAll(ctx) + assert.Equal(t, len(allUsers), 0) + +} + 
+func Test_EntUserRepo_GetSuperusers(t *testing.T) { + // Create 10 Users + superuser := 0 + users := 0 + + for i := 0; i < 10; i++ { + user := UserFactory() + ctx := context.Background() + _, _ = testRepos.Users.Create(ctx, user) + + if user.IsSuperuser { + superuser++ + } else { + users++ + } + } + + // Delete all + ctx := context.Background() + + superUsers, err := testRepos.Users.GetSuperusers(ctx) + assert.NoError(t, err) + + for _, usr := range superUsers { + assert.True(t, usr.IsSuperuser) + } + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} diff --git a/backend/internal/repo/users_interface.go b/backend/internal/repo/users_interface.go new file mode 100644 index 0000000..161850d --- /dev/null +++ b/backend/internal/repo/users_interface.go @@ -0,0 +1,27 @@ +package repo + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type UserRepository interface { + // GetOneId returns a user by id + GetOneId(ctx context.Context, ID uuid.UUID) (types.UserOut, error) + // GetOneEmail returns a user by email + GetOneEmail(ctx context.Context, email string) (types.UserOut, error) + // GetAll returns all users + GetAll(ctx context.Context) ([]types.UserOut, error) + // Get Super Users + GetSuperusers(ctx context.Context) ([]types.UserOut, error) + // Create creates a new user + Create(ctx context.Context, user types.UserCreate) (types.UserOut, error) + // Update updates a user + Update(ctx context.Context, ID uuid.UUID, user types.UserUpdate) error + // Delete deletes a user + Delete(ctx context.Context, ID uuid.UUID) error + + DeleteAll(ctx context.Context) error +} diff --git a/backend/internal/services/all.go b/backend/internal/services/all.go new file mode 100644 index 0000000..3d4273d --- /dev/null +++ b/backend/internal/services/all.go @@ -0,0 +1,15 @@ +package services + +import "github.com/hay-kot/git-web-template/backend/internal/repo" + +type AllServices struct { + User *UserService + Admin 
*AdminService +} + +func NewServices(repos *repo.AllRepos) *AllServices { + return &AllServices{ + User: &UserService{repos}, + Admin: &AdminService{repos}, + } +} diff --git a/backend/internal/services/contexts.go b/backend/internal/services/contexts.go new file mode 100644 index 0000000..d6a0968 --- /dev/null +++ b/backend/internal/services/contexts.go @@ -0,0 +1,40 @@ +package services + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type contextKeys struct { + name string +} + +var ( + ContextUser = &contextKeys{name: "User"} + ContextUserToken = &contextKeys{name: "UserToken"} +) + +// SetUserCtx is a helper function that sets the ContextUser and ContextUserToken +// values within the context of a web request (or any context). +func SetUserCtx(ctx context.Context, user *types.UserOut, token string) context.Context { + ctx = context.WithValue(ctx, ContextUser, user) + ctx = context.WithValue(ctx, ContextUserToken, token) + return ctx +} + +// UseUserCtx is a helper function that returns the user from the context. +func UseUserCtx(ctx context.Context) *types.UserOut { + if val := ctx.Value(ContextUser); val != nil { + return val.(*types.UserOut) + } + return nil +} + +// UseTokenCtx is a helper function that returns the user token from the context. 
+func UseTokenCtx(ctx context.Context) string { + if val := ctx.Value(ContextUserToken); val != nil { + return val.(string) + } + return "" +} diff --git a/backend/internal/services/contexts_test.go b/backend/internal/services/contexts_test.go new file mode 100644 index 0000000..9cae289 --- /dev/null +++ b/backend/internal/services/contexts_test.go @@ -0,0 +1,39 @@ +package services + +import ( + "context" + "testing" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/stretchr/testify/assert" +) + +func Test_SetAuthContext(t *testing.T) { + user := &types.UserOut{ + ID: uuid.New(), + } + + token := uuid.New().String() + + ctx := SetUserCtx(context.Background(), user, token) + + ctxUser := UseUserCtx(ctx) + + assert.NotNil(t, ctxUser) + assert.Equal(t, user.ID, ctxUser.ID) + + ctxUserToken := UseTokenCtx(ctx) + assert.NotEmpty(t, ctxUserToken) +} + +func Test_SetAuthContext_Nulls(t *testing.T) { + ctx := SetUserCtx(context.Background(), nil, "") + + ctxUser := UseUserCtx(ctx) + + assert.Nil(t, ctxUser) + + ctxUserToken := UseTokenCtx(ctx) + assert.Empty(t, ctxUserToken) +} diff --git a/backend/internal/services/service_admin.go b/backend/internal/services/service_admin.go new file mode 100644 index 0000000..3d60c18 --- /dev/null +++ b/backend/internal/services/service_admin.go @@ -0,0 +1,47 @@ +package services + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type AdminService struct { + repos *repo.AllRepos +} + +func (svc *AdminService) Create(ctx context.Context, usr types.UserCreate) (types.UserOut, error) { + return svc.repos.Users.Create(ctx, usr) +} + +func (svc *AdminService) GetAll(ctx context.Context) ([]types.UserOut, error) { + return svc.repos.Users.GetAll(ctx) +} + +func (svc *AdminService) GetByID(ctx context.Context, id uuid.UUID) (types.UserOut, error) { + return 
svc.repos.Users.GetOneId(ctx, id) +} + +func (svc *AdminService) GetByEmail(ctx context.Context, email string) (types.UserOut, error) { + return svc.repos.Users.GetOneEmail(ctx, email) +} + +func (svc *AdminService) UpdateProperties(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (types.UserOut, error) { + err := svc.repos.Users.Update(ctx, ID, data) + + if err != nil { + return types.UserOut{}, err + } + + return svc.repos.Users.GetOneId(ctx, ID) +} + +func (svc *AdminService) Delete(ctx context.Context, id uuid.UUID) error { + return svc.repos.Users.Delete(ctx, id) +} + +func (svc *AdminService) DeleteAll(ctx context.Context) error { + return svc.repos.Users.DeleteAll(ctx) +} diff --git a/backend/internal/services/service_user.go b/backend/internal/services/service_user.go new file mode 100644 index 0000000..4dfbc74 --- /dev/null +++ b/backend/internal/services/service_user.go @@ -0,0 +1,84 @@ +package services + +import ( + "context" + "errors" + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" +) + +var ( + oneWeek = time.Hour * 24 * 7 + ErrorInvalidLogin = errors.New("invalid username or password") + ErrorInvalidToken = errors.New("invalid token") + ErrorTokenIdMismatch = errors.New("token id mismatch") +) + +type UserService struct { + repos *repo.AllRepos +} + +// GetSelf returns the user that is currently logged in based of the token provided within +func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (types.UserOut, error) { + hash := hasher.HashToken(requestToken) + return svc.repos.AuthTokens.GetUserFromToken(ctx, hash) +} + +func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (types.UserOut, error) { + err := svc.repos.Users.Update(ctx, ID, data) + + if err != nil { + return types.UserOut{}, err + } + + return 
svc.repos.Users.GetOneId(ctx, ID) +} + +// ============================================================================ +// User Authentication + +func (svc *UserService) createToken(ctx context.Context, userId uuid.UUID) (types.UserAuthTokenDetail, error) { + newToken := hasher.GenerateToken() + + created, err := svc.repos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + UserID: userId, + TokenHash: newToken.Hash, + ExpiresAt: time.Now().Add(oneWeek), + }) + + return types.UserAuthTokenDetail{Raw: newToken.Raw, ExpiresAt: created.ExpiresAt}, err +} + +func (svc *UserService) Login(ctx context.Context, username, password string) (types.UserAuthTokenDetail, error) { + usr, err := svc.repos.Users.GetOneEmail(ctx, username) + + if err != nil || !hasher.CheckPasswordHash(password, usr.Password) { + return types.UserAuthTokenDetail{}, ErrorInvalidLogin + } + + return svc.createToken(ctx, usr.ID) +} + +func (svc *UserService) Logout(ctx context.Context, token string) error { + hash := hasher.HashToken(token) + err := svc.repos.AuthTokens.DeleteToken(ctx, hash) + return err +} + +func (svc *UserService) RenewToken(ctx context.Context, token string) (types.UserAuthTokenDetail, error) { + hash := hasher.HashToken(token) + + dbToken, err := svc.repos.AuthTokens.GetUserFromToken(ctx, hash) + + if err != nil { + return types.UserAuthTokenDetail{}, ErrorInvalidToken + } + + newToken, _ := svc.createToken(ctx, dbToken.ID) + + return newToken, nil +} diff --git a/backend/internal/types/about_types.go b/backend/internal/types/about_types.go new file mode 100644 index 0000000..7db4bd9 --- /dev/null +++ b/backend/internal/types/about_types.go @@ -0,0 +1,11 @@ +package types + +// ApiSummary +// +// @public +type ApiSummary struct { + Healthy bool `json:"health"` + Versions []string `json:"versions"` + Title string `json:"title"` + Message string `json:"message"` +} diff --git a/backend/internal/types/token_types.go b/backend/internal/types/token_types.go new file mode 100644 
index 0000000..56b0b49 --- /dev/null +++ b/backend/internal/types/token_types.go @@ -0,0 +1,39 @@ +package types + +import ( + "time" + + "github.com/google/uuid" +) + +type LoginForm struct { + Username string `json:"username"` + Password string `json:"password"` +} + +type TokenResponse struct { + BearerToken string `json:"token"` + ExpiresAt time.Time `json:"expiresAt"` +} + +type UserAuthTokenDetail struct { + Raw string `json:"raw"` + ExpiresAt time.Time `json:"expiresAt"` +} + +type UserAuthToken struct { + TokenHash []byte `json:"token"` + UserID uuid.UUID `json:"userId"` + ExpiresAt time.Time `json:"expiresAt"` + CreatedAt time.Time `json:"createdAt"` +} + +func (u UserAuthToken) IsExpired() bool { + return u.ExpiresAt.Before(time.Now()) +} + +type UserAuthTokenCreate struct { + TokenHash []byte `json:"token"` + UserID uuid.UUID `json:"userId"` + ExpiresAt time.Time `json:"expiresAt"` +} diff --git a/backend/internal/types/users_types.go b/backend/internal/types/users_types.go new file mode 100644 index 0000000..db1c404 --- /dev/null +++ b/backend/internal/types/users_types.go @@ -0,0 +1,58 @@ +package types + +import ( + "errors" + + "github.com/google/uuid" +) + +var ( + ErrNameEmpty = errors.New("name is empty") + ErrEmailEmpty = errors.New("email is empty") +) + +// UserIn is a basic user input struct containing only the fields that are +// required for user creation. +type UserIn struct { + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"password"` +} + +// UserCreate is the Data object contain the requirements of creating a user +// in the database. It should to create users from an API unless the user has +// rights to create SuperUsers. For regular user in data use the UserIn struct. 
+type UserCreate struct { + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"password"` + IsSuperuser bool `json:"isSuperuser"` +} + +func (u *UserCreate) Validate() error { + if u.Name == "" { + return ErrNameEmpty + } + if u.Email == "" { + return ErrEmailEmpty + } + return nil +} + +type UserOut struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"-"` + IsSuperuser bool `json:"isSuperuser"` +} + +// IsNull is a proxy call for `usr.Id == uuid.Nil` +func (usr *UserOut) IsNull() bool { + return usr.ID == uuid.Nil +} + +type UserUpdate struct { + Name *string `json:"name"` + Email *string `json:"email"` +} diff --git a/backend/internal/types/users_types_test.go b/backend/internal/types/users_types_test.go new file mode 100644 index 0000000..bc3b825 --- /dev/null +++ b/backend/internal/types/users_types_test.go @@ -0,0 +1,76 @@ +package types + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestUserCreate_Validate(t *testing.T) { + type fields struct { + Name string + Email string + Password string + IsSuperuser bool + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "no_name", + fields: fields{ + Name: "", + Email: "", + Password: "", + IsSuperuser: false, + }, + wantErr: true, + }, + { + name: "no_email", + fields: fields{ + Name: "test", + Email: "", + Password: "", + IsSuperuser: false, + }, + wantErr: true, + }, + { + name: "valid", + fields: fields{ + Name: "test", + Email: "test@email.com", + Password: "mypassword", + IsSuperuser: false, + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + u := &UserCreate{ + Name: tt.fields.Name, + Email: tt.fields.Email, + Password: tt.fields.Password, + IsSuperuser: tt.fields.IsSuperuser, + } + if err := u.Validate(); (err != nil) != tt.wantErr { + t.Errorf("UserCreate.Validate() error = %v, 
wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestUserOut_IsNull(t *testing.T) { + nullUser := UserOut{} + + assert.True(t, nullUser.IsNull()) + + nullUser.ID = uuid.New() + + assert.False(t, nullUser.IsNull()) +} diff --git a/backend/pkgs/automapper/README.md b/backend/pkgs/automapper/README.md new file mode 100644 index 0000000..fc9b01b --- /dev/null +++ b/backend/pkgs/automapper/README.md @@ -0,0 +1,56 @@ +# Automapper + + +Automapper is an opinionated Go library that provides a dead simple interface to mapping 1-1 models To/From a database Model to a DTO or Schema using value semantics. It does not rely on code comments, but instead uses standard Go code to define your mapping and configuration to make it easy to use an refactor. + +Current Limitation +- flat/single level models +- single schema to model per config entry +- limited configuration (support lowercase, camelcase, snakecase, etc) + + +Future Considerations +- [ ] Recursive mapping of embed structs +- [ ] Optional generate time type checker. +- [ ] Ensure values are copied to the destination and not just a reference +- [ ] ?!?!? 
+ + +## Example Configuration + +```go +package main + +import ( + "github.com/mealie-recipes/mealie-analytics/ent" + "github.com/mealie-recipes/mealie-analytics/internal/types" + "github.com/mealie-recipes/mealie-analytics/pkgs/automapper" +) + +// getMappers serialized the config file into a list of automapper struct +func getMappers() []automapper.AutoMapper { + return []automapper.AutoMapper{ + { + Package: "mapper", // generated package name + Prefix: "analytics", // generating file prefix -> analytics_automapper.go + Name: "Mealie Analytics", // For console output + Schema: automapper.Schema{ + Type: types.Analytics{}, + Prefix: "types", // Package namespace + }, + Model: automapper.Model{ + Type: ent.Analytics{}, + Prefix: "ent", // Package namespace + }, + Imports: []string{}, // Specify additional imports here + }, + } +} + +func main() { + automappers := getMappers() + conf := automapper.DefaultConf() + + automapper.Generate(automappers, conf) +} +``` \ No newline at end of file diff --git a/backend/pkgs/automapper/automapper.go b/backend/pkgs/automapper/automapper.go new file mode 100644 index 0000000..f620c60 --- /dev/null +++ b/backend/pkgs/automapper/automapper.go @@ -0,0 +1,92 @@ +package automapper + +import ( + "bytes" + "fmt" + "go/format" + "os" + "reflect" + "strings" + "text/template" +) + +type FieldAssignment struct { + ModelField string + SchemaField string +} + +type Model struct { + Type interface{} + Prefix string + Fields []reflect.StructField + Reference string +} + +type Schema struct { + Name string + Type interface{} + Prefix string + Fields []reflect.StructField + Reference string +} + +type AutoMapper struct { + Name string + Package string + Prefix string + Schema Schema + Model Model + Imports []string + FieldAssignments []FieldAssignment +} + +func (mapper *AutoMapper) ExecuteTemplates(conf *AutoMapperConf) { + t := template.New("automapper") + t, err := t.Parse(automapperTemplate) + if err != nil { + fmt.Println(err) + } + + // 
Ensure the output directory exists + os.MkdirAll(conf.OutDir, 0755) + + var path = fmt.Sprintf("%s/%s", conf.OutDir, mapper.GetFileName()) + + f, err := os.Create(path) + if err != nil { + panic(err) + } + defer f.Close() + + var buf bytes.Buffer + + err = t.Execute(&buf, mapper) + if err != nil { + fmt.Println(err) + } + + text, err := format.Source(buf.Bytes()) + + if err != nil { + fmt.Println(err) + } + + f.Write(text) + +} + +// GetFileName returns the computed file name based off user preference. +// If the Prefix has been specified on the AutoMapper it will be used +// in place of the Struct name. If the Prefix is not specified, the +// Struct name will be used. +// +// Examples: +// prefix_automapper.go +// mystructname_automapper.go +func (mapper *AutoMapper) GetFileName() string { + if mapper.Prefix == "" { + return strings.ToLower(mapper.Schema.Reference) + "_" + "automapper.go" + } + return strings.ToLower(mapper.Prefix) + "_" + "automapper.go" + +} diff --git a/backend/pkgs/automapper/conf.go b/backend/pkgs/automapper/conf.go new file mode 100644 index 0000000..96f090a --- /dev/null +++ b/backend/pkgs/automapper/conf.go @@ -0,0 +1,11 @@ +package automapper + +type AutoMapperConf struct { + OutDir string +} + +func DefaultConf() *AutoMapperConf { + return &AutoMapperConf{ + OutDir: "internal/mapper", + } +} diff --git a/backend/pkgs/automapper/main.go b/backend/pkgs/automapper/main.go new file mode 100644 index 0000000..e3eba73 --- /dev/null +++ b/backend/pkgs/automapper/main.go @@ -0,0 +1,48 @@ +package automapper + +import ( + "fmt" + "reflect" + "strings" +) + +func Generate(automappers []AutoMapper, conf *AutoMapperConf) { + for _, mapper := range automappers { + modelType := reflect.TypeOf(mapper.Model.Type) + transferObjectType := reflect.TypeOf(mapper.Schema.Type) + + fmt.Printf("%s: %s -> %s\n", mapper.Name, modelType.Name(), transferObjectType.Name()) + + // From Fields + mapper.Imports = append(mapper.Imports, modelType.PkgPath()) + 
mapper.Model.Reference = modelType.Name() + mapper.Model.Fields = make([]reflect.StructField, 0) + for i := 0; i < modelType.NumField(); i++ { + mapper.Model.Fields = append(mapper.Model.Fields, modelType.Field(i)) + } + + // To Fields + mapper.Imports = append(mapper.Imports, transferObjectType.PkgPath()) + mapper.Schema.Reference = transferObjectType.Name() + mapper.Schema.Fields = make([]reflect.StructField, 0) + for i := 0; i < transferObjectType.NumField(); i++ { + mapper.Schema.Fields = append(mapper.Schema.Fields, transferObjectType.Field(i)) + } + + // Determine Field Assignments by matching the To fields and From fields by name + mapper.FieldAssignments = make([]FieldAssignment, 0) + + for _, toField := range mapper.Schema.Fields { + for _, fromField := range mapper.Model.Fields { + if strings.EqualFold(toField.Name, fromField.Name) { + mapper.FieldAssignments = append(mapper.FieldAssignments, FieldAssignment{ + ModelField: fromField.Name, + SchemaField: toField.Name, + }) + } + } + } + + mapper.ExecuteTemplates(conf) + } +} diff --git a/backend/pkgs/automapper/templates.go b/backend/pkgs/automapper/templates.go new file mode 100644 index 0000000..b001f61 --- /dev/null +++ b/backend/pkgs/automapper/templates.go @@ -0,0 +1,22 @@ +package automapper + +var automapperTemplate = `// Code generated by "/pkgs/automapper"; DO NOT EDIT. 
+package {{ .Package }} + +import ( + {{ range $import := .Imports }}"{{ $import }}" + {{ end }} +) + +func {{ .Schema.Reference }}FromModel(from {{ .Model.Prefix}}.{{ .Model.Reference }}) {{ .Schema.Prefix}}.{{ .Schema.Reference }} { + return {{ .Schema.Prefix}}.{{ .Schema.Reference }}{ {{ range $i, $f := .FieldAssignments }} + {{ $f.SchemaField }}: from.{{ $f.ModelField }},{{ end }} + } +} + +func {{ .Schema.Reference }}ToModel(from {{ .Schema.Prefix}}.{{ .Schema.Reference }}) {{ .Model.Prefix}}.{{ .Model.Reference }} { + return {{ .Model.Prefix}}.{{ .Model.Reference }}{ {{ range $i, $f := .FieldAssignments }} + {{ $f.ModelField }}: from.{{ $f.SchemaField }},{{ end }} + } +} +` diff --git a/backend/pkgs/faker/random.go b/backend/pkgs/faker/random.go new file mode 100644 index 0000000..42ef538 --- /dev/null +++ b/backend/pkgs/faker/random.go @@ -0,0 +1,37 @@ +package faker + +import ( + "math/rand" + "time" +) + +var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + +type Faker struct { +} + +func NewFaker() *Faker { + rand.Seed(time.Now().UnixNano()) + return &Faker{} +} + +func (f *Faker) RandomString(length int) string { + + b := make([]rune, length) + for i := range b { + b[i] = letters[rand.Intn(len(letters))] + } + return string(b) +} + +func (f *Faker) RandomEmail() string { + return f.RandomString(10) + "@email.com" +} + +func (f *Faker) RandomBool() bool { + return rand.Intn(2) == 1 +} + +func (f *Faker) RandomNumber(min, max int) int { + return rand.Intn(max-min) + min +} diff --git a/backend/pkgs/faker/randoms_test.go b/backend/pkgs/faker/randoms_test.go new file mode 100644 index 0000000..79747c2 --- /dev/null +++ b/backend/pkgs/faker/randoms_test.go @@ -0,0 +1,95 @@ +package faker + +import ( + "testing" +) + +const Loops = 500 + +func ValidateUnique(values []string) bool { + for i := 0; i < len(values); i++ { + for j := i + 1; j < len(values); j++ { + if values[i] == values[j] { + return false + } + } + } + return true +} + 
+func Test_GetRandomString(t *testing.T) { + t.Parallel() + // Test that the function returns a string of the correct length + var generated = make([]string, Loops) + + faker := NewFaker() + + for i := 0; i < Loops; i++ { + generated[i] = faker.RandomString(10) + } + + if !ValidateUnique(generated) { + t.Error("Generated values are not unique") + } +} + +func Test_GetRandomEmail(t *testing.T) { + t.Parallel() + // Test that the function returns a string of the correct length + var generated = make([]string, Loops) + + faker := NewFaker() + + for i := 0; i < Loops; i++ { + generated[i] = faker.RandomEmail() + } + + if !ValidateUnique(generated) { + t.Error("Generated values are not unique") + } +} + +func Test_GetRandomBool(t *testing.T) { + t.Parallel() + + var trues = 0 + var falses = 0 + + faker := NewFaker() + + for i := 0; i < Loops; i++ { + if faker.RandomBool() { + trues++ + } else { + falses++ + } + } + + if trues == 0 || falses == 0 { + t.Error("Generated boolean don't appear random") + } +} + +func Test_RandomNumber(t *testing.T) { + t.Parallel() + + f := NewFaker() + + const MIN = 0 + const MAX = 100 + + last := MIN - 1 + + for i := 0; i < Loops; i++ { + n := f.RandomNumber(MIN, MAX) + + if n == last { + t.Errorf("RandomNumber() failed to generate unique number") + } + + if n < MIN || n > MAX { + t.Errorf("RandomNumber() failed to generate a number between %v and %v", MIN, MAX) + } + } + +} diff --git a/backend/pkgs/hasher/password.go b/backend/pkgs/hasher/password.go new file mode 100644 index 0000000..f7cca4d --- /dev/null +++ b/backend/pkgs/hasher/password.go @@ -0,0 +1,13 @@ +package hasher + +import "golang.org/x/crypto/bcrypt" + +func HashPassword(password string) (string, error) { + bytes, err := bcrypt.GenerateFromPassword([]byte(password), 14) + return string(bytes), err +} + +func CheckPasswordHash(password, hash string) bool { + err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)) + return err == nil +} diff --git 
a/backend/pkgs/hasher/password_test.go b/backend/pkgs/hasher/password_test.go new file mode 100644 index 0000000..6f9128e --- /dev/null +++ b/backend/pkgs/hasher/password_test.go @@ -0,0 +1,40 @@ +package hasher + +import "testing" + +func TestHashPassword(t *testing.T) { + t.Parallel() + type args struct { + password string + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "letters_and_numbers", + args: args{ + password: "password123456788", + }, + }, + { + name: "letters_number_and_special", + args: args{ + password: "!2afj3214pofajip3142j;fa", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := HashPassword(tt.args.password) + if (err != nil) != tt.wantErr { + t.Errorf("HashPassword() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !CheckPasswordHash(tt.args.password, got) { + t.Errorf("CheckPasswordHash() failed to validate password=%v against hash=%v", tt.args.password, got) + } + }) + } +} diff --git a/backend/pkgs/hasher/token.go b/backend/pkgs/hasher/token.go new file mode 100644 index 0000000..117eeca --- /dev/null +++ b/backend/pkgs/hasher/token.go @@ -0,0 +1,30 @@ +package hasher + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/base32" +) + +type Token struct { + Raw string + Hash []byte +} + +func GenerateToken() Token { + randomBytes := make([]byte, 16) + rand.Read(randomBytes) + + plainText := base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(randomBytes) + hash := HashToken(plainText) + + return Token{ + Raw: plainText, + Hash: hash, + } +} + +func HashToken(plainTextToken string) []byte { + hash := sha256.Sum256([]byte(plainTextToken)) + return hash[:] +} diff --git a/backend/pkgs/hasher/token_test.go b/backend/pkgs/hasher/token_test.go new file mode 100644 index 0000000..d61fddf --- /dev/null +++ b/backend/pkgs/hasher/token_test.go @@ -0,0 +1,44 @@ +package hasher + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + 
+const ITERATIONS = 200 + +func Test_NewToken(t *testing.T) { + t.Parallel() + tokens := make([]Token, ITERATIONS) + for i := 0; i < ITERATIONS; i++ { + tokens[i] = GenerateToken() + } + + // Check if they are unique + for i := 0; i < 5; i++ { + for j := i + 1; j < 5; j++ { + if tokens[i].Raw == tokens[j].Raw { + t.Errorf("NewToken() failed to generate unique tokens") + } + } + } +} + +func Test_HashToken_CheckTokenHash(t *testing.T) { + t.Parallel() + for i := 0; i < ITERATIONS; i++ { + token := GenerateToken() + + // Check raw text is reltively random + for j := 0; j < 5; j++ { + assert.NotEqual(t, token.Raw, GenerateToken().Raw) + } + + // Check token length is less than 32 characters + assert.Less(t, len(token.Raw), 32) + + // Check hash is the same + assert.Equal(t, token.Hash, HashToken(token.Raw)) + } +} diff --git a/backend/pkgs/logger/struct_logger.go b/backend/pkgs/logger/struct_logger.go new file mode 100644 index 0000000..2007ebb --- /dev/null +++ b/backend/pkgs/logger/struct_logger.go @@ -0,0 +1,121 @@ +package logger + +import ( + "encoding/json" + "io" + "os" + "runtime/debug" + "sync" + "time" +) + +type Level int8 + +const ( + LevelDebug Level = iota + LevelInfo + LevelError + LevelFatal + LevelOff +) + +func (l Level) String() string { + switch l { + case LevelDebug: + return "DEBUG" + case LevelInfo: + return "INFO" + case LevelError: + return "ERROR" + case LevelFatal: + return "FATAL" + default: + return "" + } +} + +type Props map[string]string + +type Logger struct { + out io.Writer + minLevel Level + mu sync.Mutex +} + +func New(out io.Writer, minLevel Level) *Logger { + return &Logger{ + out: out, + minLevel: minLevel, + } +} + +func (l *Logger) Debug(message string, properties map[string]string) { + l.print(LevelDebug, message, properties) +} + +func (l *Logger) Info(message string, properties map[string]string) { + l.print(LevelInfo, message, properties) +} + +func (l *Logger) Error(err error, properties map[string]string) { + 
l.print(LevelError, err.Error(), properties) +} + +func (l *Logger) Fatal(err error, properties map[string]string) { + l.print(LevelFatal, err.Error(), properties) + os.Exit(1) // For entries at the FATAL level, we also terminate the application. +} + +func (l *Logger) print(level Level, message string, properties map[string]string) (int, error) { + // If the severity level of the log entry is below the minimum severity for the + // logger, then return with no further action. + if level < l.minLevel { + return 0, nil + } + + // Declare an anonymous struct holding the data for the log entry. + aux := struct { + Level string `json:"level"` + Time string `json:"time"` + Message string `json:"message"` + Properties map[string]string `json:"properties,omitempty"` + Trace string `json:"trace,omitempty"` + }{ + Level: level.String(), + Time: time.Now().UTC().Format(time.RFC3339), + Message: message, + Properties: properties, + } + + // Include a stack trace for entries at the ERROR and FATAL levels. + if level >= LevelError { + aux.Trace = string(debug.Stack()) + } + + // Declare a line variable for holding the actual log entry text. + var line []byte + + // Marshal the anonymous struct to JSON and store it in the line variable. If there + // was a problem creating the JSON, set the contents of the log entry to be that + // plain-text error message instead.” + line, err := json.Marshal(aux) + if err != nil { + line = []byte(LevelError.String() + ": unable to marshal log message:" + err.Error()) + } + + // Lock the mutex so that no two writes to the output destination cannot happen + // concurrently. If we don't do this, it's possible that the text for two or more + // log entries will be intermingled in the output. + l.mu.Lock() + defer l.mu.Unlock() + + // Write the log entry followed by a newline. + return l.out.Write(append(line, '\n')) +} + +// We also implement a Write() method on our Logger type so that it satisfies the +// io.Writer interface. 
This writes a log entry at the ERROR level with no additional +// properties. +func (l *Logger) Write(message []byte) (n int, err error) { + return l.print(LevelError, string(message), nil) +} diff --git a/backend/pkgs/logger/struct_logger_test.go b/backend/pkgs/logger/struct_logger_test.go new file mode 100644 index 0000000..9b8526d --- /dev/null +++ b/backend/pkgs/logger/struct_logger_test.go @@ -0,0 +1,119 @@ +package logger + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +var lastWrite = []byte{} + +type testLogRecorder struct { + t *testing.T +} + +func (tlr testLogRecorder) Write(p []byte) (n int, err error) { + lastWrite = p + return len(p), nil +} + +type logEntry struct { + Level string `json:"level"` + Message string `json:"message"` + Props *Props `json:"properties"` +} + +func (lr *logEntry) Unmarshal(t *testing.T, jbytes []byte) { + err := json.Unmarshal(jbytes, lr) + if err != nil { + t.Error(err) + } +} + +func Test_LevelString(t *testing.T) { + assert.Equal(t, "DEBUG", LevelDebug.String()) + assert.Equal(t, "INFO", LevelInfo.String()) + assert.Equal(t, "ERROR", LevelError.String()) + assert.Equal(t, "FATAL", LevelFatal.String()) + assert.Equal(t, "", LevelOff.String()) +} + +func Test_NewLogger(t *testing.T) { + logRecorder := testLogRecorder{t: t} + + logger := New(logRecorder, LevelInfo) + assert.NotNil(t, logger) +} + +func getTestLogger(t *testing.T, level Level) *Logger { + logRecorder := testLogRecorder{t: t} + + logger := New(logRecorder, level) + assert.NotNil(t, logger) + + return logger +} + +func checkLastEntry(t *testing.T, level Level, message string, props *Props) { + entry := &logEntry{} + entry.Unmarshal(t, lastWrite) + + assert.Equal(t, level.String(), entry.Level) + assert.Equal(t, message, entry.Message) + assert.Equal(t, props, entry.Props) + +} + +func Test_LoggerDebug(t *testing.T) { + lgr := getTestLogger(t, LevelDebug) + + lgr.Debug("Test Debug", Props{"Hello": "World"}) + 
checkLastEntry(t, LevelDebug, "Test Debug", &Props{"Hello": "World"}) + + lastWrite = []byte{} +} + +func Test_LoggerInfo(t *testing.T) { + lgr := getTestLogger(t, LevelInfo) + + lgr.Info("Test Info", Props{"Hello": "World"}) + checkLastEntry(t, LevelInfo, "Test Info", &Props{"Hello": "World"}) + lastWrite = []byte{} + +} + +func Test_LoggerError(t *testing.T) { + lgr := getTestLogger(t, LevelError) + + myerror := errors.New("Test Error") + + lgr.Error(myerror, Props{"Hello": "World"}) + checkLastEntry(t, LevelError, "Test Error", &Props{"Hello": "World"}) + lastWrite = []byte{} + +} + +func Test_LoggerLevelScale(t *testing.T) { + lgr := getTestLogger(t, LevelInfo) + lastWrite = []byte{} + lgr.Debug("Test Debug", Props{"Hello": "World"}) + + assert.Equal(t, []byte{}, lastWrite) + + lgr = getTestLogger(t, LevelError) + lastWrite = []byte{} + lgr.Info("Test Debug", Props{"Hello": "World"}) + lgr.Debug("Test Debug", Props{"Hello": "World"}) + + assert.Equal(t, []byte{}, lastWrite) + + lgr = getTestLogger(t, LevelFatal) + + lgr.Info("Test Debug", Props{"Hello": "World"}) + lgr.Debug("Test Debug", Props{"Hello": "World"}) + lgr.Error(errors.New("Test Error"), Props{"Hello": "World"}) + + assert.Equal(t, []byte{}, lastWrite) +} diff --git a/backend/pkgs/mailer/mailer.go b/backend/pkgs/mailer/mailer.go new file mode 100644 index 0000000..22609aa --- /dev/null +++ b/backend/pkgs/mailer/mailer.go @@ -0,0 +1,51 @@ +package mailer + +import ( + "encoding/base64" + "fmt" + "mime" + "net/smtp" + "strconv" +) + +type Mailer struct { + Host string `json:"host,omitempty"` + Port int `json:"port,omitempty"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + From string `json:"from,omitempty"` +} + +func (m *Mailer) Ready() bool { + return m.Host != "" && m.Port != 0 && m.Username != "" && m.Password != "" && m.From != "" +} + +func (m *Mailer) server() string { + return m.Host + ":" + strconv.Itoa(m.Port) +} + +func (m *Mailer) Send(msg 
*Message) error { + server := m.server() + + header := make(map[string]string) + header["From"] = msg.From.String() + header["To"] = msg.To.String() + header["Subject"] = mime.QEncoding.Encode("UTF-8", msg.Subject) + header["MIME-Version"] = "1.0" + header["Content-Type"] = "text/html; charset=\"utf-8\"" + header["Content-Transfer-Encoding"] = "base64" + + message := "" + for k, v := range header { + message += fmt.Sprintf("%s: %s\r\n", k, v) + } + message += "\r\n" + base64.StdEncoding.EncodeToString([]byte(msg.Body)) + + return smtp.SendMail( + server, + smtp.PlainAuth("", m.Username, m.Password, m.Host), + m.From, + []string{msg.To.Address}, + []byte(message), + ) +} diff --git a/backend/pkgs/mailer/mailer_test.go b/backend/pkgs/mailer/mailer_test.go new file mode 100644 index 0000000..f1b71db --- /dev/null +++ b/backend/pkgs/mailer/mailer_test.go @@ -0,0 +1,66 @@ +package mailer + +import ( + "encoding/json" + "io/ioutil" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + TestMailerConfig = "test-mailer.json" +) + +func GetTestMailer() (*Mailer, error) { + // Read JSON File + bytes, err := ioutil.ReadFile(TestMailerConfig) + + mailer := &Mailer{} + + if err != nil { + return nil, err + } + + // Unmarshal JSON + err = json.Unmarshal(bytes, mailer) + + if err != nil { + return nil, err + } + + return mailer, nil + +} + +func Test_Mailer(t *testing.T) { + t.Parallel() + + mailer, err := GetTestMailer() + + if err != nil { + t.Skip("Error Reading Test Mailer Config - Skipping") + } + + if !mailer.Ready() { + t.Skip("Mailer not ready - Skipping") + } + + message, err := RenderWelcome() + + if err != nil { + t.Error(err) + } + + mb := NewMessageBuilder(). + SetBody(message). + SetSubject("Hello"). + SetTo("John Doe", "john@doe.com"). 
// Message is a fully-assembled email, ready to hand to a Mailer.
type Message struct {
	Subject string
	To      mail.Address
	From    mail.Address
	Body    string
}

// MessageBuilder accumulates the parts of a Message through chainable setters.
type MessageBuilder struct {
	subject string
	to      mail.Address
	from    mail.Address
	body    string
}

// NewMessageBuilder returns an empty builder.
func NewMessageBuilder() *MessageBuilder {
	return &MessageBuilder{}
}

// Build materializes the accumulated state into a Message.
func (b *MessageBuilder) Build() *Message {
	msg := Message{
		Subject: b.subject,
		To:      b.to,
		From:    b.from,
		Body:    b.body,
	}
	return &msg
}

// SetSubject records the subject line and returns the builder for chaining.
func (b *MessageBuilder) SetSubject(subject string) *MessageBuilder {
	b.subject = subject
	return b
}

// SetTo records the recipient's display name and email address.
func (b *MessageBuilder) SetTo(name, to string) *MessageBuilder {
	b.to = mail.Address{Name: name, Address: to}
	return b
}

// SetFrom records the sender's display name and email address.
func (b *MessageBuilder) SetFrom(name, from string) *MessageBuilder {
	b.from = mail.Address{Name: name, Address: from}
	return b
}

// SetBody records the message body.
func (b *MessageBuilder) SetBody(body string) *MessageBuilder {
	b.body = body
	return b
}
+ SetFrom("Jane Doe", "jane@doe.com") + + msg := mb.Build() + + assert.Equal(t, "Hello", msg.Subject) + assert.Equal(t, "Hello World!", msg.Body) + assert.Equal(t, "John Doe", msg.To.Name) + assert.Equal(t, "john@doe.com", msg.To.Address) + assert.Equal(t, "Jane Doe", msg.From.Name) + assert.Equal(t, "jane@doe.com", msg.From.Address) +} diff --git a/backend/pkgs/mailer/templates.go b/backend/pkgs/mailer/templates.go new file mode 100644 index 0000000..b7984c0 --- /dev/null +++ b/backend/pkgs/mailer/templates.go @@ -0,0 +1,62 @@ +package mailer + +import ( + "bytes" + _ "embed" + "html/template" +) + +//go:embed templates/welcome.html +var templatesWelcome string + +type TemplateDefaults struct { + CompanyName string + CompanyAddress string + CompanyURL string + ActivateAccountURL string + UnsubscribeURL string +} + +type TemplateProps struct { + Defaults TemplateDefaults + Data map[string]string +} + +func (tp *TemplateProps) Set(key, value string) { + tp.Data[key] = value +} + +func DefaultTemplateData() TemplateProps { + return TemplateProps{ + Defaults: TemplateDefaults{ + CompanyName: "Haybytes.com", + CompanyAddress: "123 Main St, Anytown, CA 12345", + CompanyURL: "https://haybytes.com", + ActivateAccountURL: "https://google.com", + UnsubscribeURL: "https://google.com", + }, + Data: make(map[string]string), + } +} + +func render(tpl string, data TemplateProps) (string, error) { + tmpl, err := template.New("name").Parse(tpl) + + if err != nil { + return "", err + } + + var tplBuffer bytes.Buffer + + err = tmpl.Execute(&tplBuffer, data) + + if err != nil { + return "", err + } + + return tplBuffer.String(), nil +} + +func RenderWelcome() (string, error) { + return render(templatesWelcome, DefaultTemplateData()) +} diff --git a/backend/pkgs/mailer/templates/welcome.html b/backend/pkgs/mailer/templates/welcome.html new file mode 100644 index 0000000..ed8d9c8 --- /dev/null +++ b/backend/pkgs/mailer/templates/welcome.html @@ -0,0 +1,444 @@ + + + + + + Welcome! 
// HTTP header name and MIME-type constants shared by the server helpers.
const (
	// ContentType is the canonical Content-Type header name.
	ContentType = "Content-Type"
	// ContentJSON is the MIME type used for JSON response bodies.
	ContentJSON = "application/json"
	// ContentXML is the MIME type used for XML response bodies.
	ContentXML = "application/xml"
)
+func Decode(r *http.Request, val interface{}) error { + decoder := json.NewDecoder(r.Body) + decoder.DisallowUnknownFields() + if err := decoder.Decode(val); err != nil { + return err + } + return nil +} + +// GetId is a shotcut to get the id from the request URL or return a default value +func GetParam(r *http.Request, key, d string) string { + val := r.URL.Query().Get(key) + + if val == "" { + return d + } + + return val +} + +// GetSkip is a shotcut to get the skip from the request URL parameters +func GetSkip(r *http.Request, d string) string { + return GetParam(r, "skip", d) +} + +// GetSkip is a shotcut to get the skip from the request URL parameters +func GetId(r *http.Request, d string) string { + return GetParam(r, "id", d) +} + +// GetLimit is a shotcut to get the limit from the request URL parameters +func GetLimit(r *http.Request, d string) string { + return GetParam(r, "limit", d) +} + +// GetQuery is a shotcut to get the sort from the request URL parameters +func GetQuery(r *http.Request, d string) string { + return GetParam(r, "query", d) +} diff --git a/backend/pkgs/server/request_test.go b/backend/pkgs/server/request_test.go new file mode 100644 index 0000000..05dc8c5 --- /dev/null +++ b/backend/pkgs/server/request_test.go @@ -0,0 +1,210 @@ +package server + +import ( + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +type TestStruct struct { + Name string `json:"name"` + Data string `json:"data"` +} + +func TestDecode(t *testing.T) { + type args struct { + r *http.Request + val interface{} + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "check_error", + args: args{ + r: &http.Request{ + Body: http.NoBody, + }, + val: make(map[string]interface{}), + }, + wantErr: true, + }, + { + name: "check_success", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + val: TestStruct{ + Name: "test", + Data: "test", + }, + }, + wantErr: false, + }, + } + for 
_, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := Decode(tt.args.r, &tt.args.val); (err != nil) != tt.wantErr { + t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestGetParam(t *testing.T) { + type args struct { + r *http.Request + key string + d string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + key: "id", + d: "default", + }, + want: "default", + }, + { + name: "check_id", + args: args{ + r: httptest.NewRequest("POST", "/item?id=123", strings.NewReader(`{"name":"test","data":"test"}`)), + key: "id", + d: "", + }, + want: "123", + }, + { + name: "check_query", + args: args{ + r: httptest.NewRequest("POST", "/item?query=hello-world", strings.NewReader(`{"name":"test","data":"test"}`)), + key: "query", + d: "", + }, + want: "hello-world", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetParam(tt.args.r, tt.args.key, tt.args.d); got != tt.want { + t.Errorf("GetParam() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestGetSkip(t *testing.T) { + type args struct { + r *http.Request + d string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "0", + }, + { + name: "check_skip", + args: args{ + r: httptest.NewRequest("POST", "/item?skip=107", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "107", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetSkip(tt.args.r, tt.args.d); got != tt.want { + t.Errorf("GetSkip() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestGetLimit(t *testing.T) { + type args struct { + r *http.Request + d string + } + tests := 
[]struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "0", + }, + { + name: "check_limit", + args: args{ + r: httptest.NewRequest("POST", "/item?limit=107", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "107", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetLimit(tt.args.r, tt.args.d); got != tt.want { + t.Errorf("GetLimit() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestGetQuery(t *testing.T) { + type args struct { + r *http.Request + d string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "0", + }, + { + name: "check_query", + args: args{ + r: httptest.NewRequest("POST", "/item?query=hello-query", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "hello-query", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetQuery(tt.args.r, tt.args.d); got != tt.want { + t.Errorf("GetQuery() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/backend/pkgs/server/response.go b/backend/pkgs/server/response.go new file mode 100644 index 0000000..d4d008f --- /dev/null +++ b/backend/pkgs/server/response.go @@ -0,0 +1,61 @@ +package server + +import ( + "encoding/json" + "errors" + "net/http" +) + +// Respond converts a Go value to JSON and sends it to the client. +// Adapted from https://github.com/ardanlabs/service/tree/master/foundation/web +func Respond(w http.ResponseWriter, statusCode int, data interface{}) error { + // If there is nothing to marshal then set status code and return. 
+ if statusCode == http.StatusNoContent { + w.WriteHeader(statusCode) + return nil + } + + // Convert the response value to JSON. + jsonData, err := json.Marshal(data) + if err != nil { + return err + } + + // Set the content type and headers once we know marshaling has succeeded. + w.Header().Set("Content-Type", "application/json") + + // Write the status code to the response. + w.WriteHeader(statusCode) + + // Send the result back to the client. + if _, err := w.Write(jsonData); err != nil { + return err + } + + return nil +} + +// ResponseError is a helper function that sends a JSON response of an error message +func RespondError(w http.ResponseWriter, statusCode int, err error) { + eb := ErrorBuilder{} + eb.AddError(err) + eb.Respond(w, statusCode) +} + +// RespondInternalServerError is a wrapper around RespondError that sends a 500 internal server error. Useful for +// Sending generic errors when everything went wrong. +func RespondInternalServerError(w http.ResponseWriter) { + RespondError(w, http.StatusInternalServerError, errors.New("internal server error")) +} + +// RespondNotFound is a helper utility for responding with a generic +// "unauthorized" error. +func RespondUnauthorized(w http.ResponseWriter) { + RespondError(w, http.StatusUnauthorized, errors.New("unauthorized")) +} + +// RespondForbidden is a helper utility for responding with a generic +// "forbidden" error. +func RespondForbidden(w http.ResponseWriter) { + RespondError(w, http.StatusForbidden, errors.New("forbidden")) +} diff --git a/backend/pkgs/server/response_error_builder.go b/backend/pkgs/server/response_error_builder.go new file mode 100644 index 0000000..ac8d34d --- /dev/null +++ b/backend/pkgs/server/response_error_builder.go @@ -0,0 +1,51 @@ +package server + +import ( + "net/http" +) + +// ErrorBuilder is a helper type to build a response that contains an array of errors. +// Typical use cases are for returning an array of validation errors back to the user. 
+// +// Example: +// +// +// { +// "errors": [ +// "invalid id", +// "invalid name", +// "invalid description" +// ], +// "message": "Unprocessable Entity", +// "status": 422 +// } +// +type ErrorBuilder struct { + errs []string +} + +// HasErrors returns true if the ErrorBuilder has any errors. +func (eb *ErrorBuilder) HasErrors() bool { + if (eb.errs == nil) || (len(eb.errs) == 0) { + return false + } + return true +} + +// AddError adds an error to the ErrorBuilder if an error is not nil. If the +// Error is nil, then nothing is added. +func (eb *ErrorBuilder) AddError(err error) { + if err != nil { + if eb.errs == nil { + eb.errs = make([]string, 0) + } + + eb.errs = append(eb.errs, err.Error()) + } +} + +// Respond sends a JSON response with the ErrorBuilder's errors. If there are no errors, then +// the errors field will be an empty array. +func (eb *ErrorBuilder) Respond(w http.ResponseWriter, statusCode int) { + Respond(w, statusCode, Wrap(nil).AddError(http.StatusText(statusCode), eb.errs)) +} diff --git a/backend/pkgs/server/response_error_builder_test.go b/backend/pkgs/server/response_error_builder_test.go new file mode 100644 index 0000000..012e744 --- /dev/null +++ b/backend/pkgs/server/response_error_builder_test.go @@ -0,0 +1,107 @@ +package server + +import ( + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "testing" + + "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/stretchr/testify/assert" +) + +func Test_ErrorBuilder_HasErrors_NilList(t *testing.T) { + t.Parallel() + + var ebNilList = ErrorBuilder{} + assert.False(t, ebNilList.HasErrors(), "ErrorBuilder.HasErrors() should return false when list is nil") + +} + +func Test_ErrorBuilder_HasErrors_EmptyList(t *testing.T) { + t.Parallel() + + var ebEmptyList = ErrorBuilder{ + errs: []string{}, + } + assert.False(t, ebEmptyList.HasErrors(), "ErrorBuilder.HasErrors() should return false when list is empty") + +} + +func Test_ErrorBuilder_HasErrors_WithError(t 
*testing.T) { + t.Parallel() + + var ebList = ErrorBuilder{} + ebList.AddError(errors.New("test error")) + + assert.True(t, ebList.HasErrors(), "ErrorBuilder.HasErrors() should return true when list is not empty") + +} + +func Test_ErrorBuilder_AddError(t *testing.T) { + t.Parallel() + + randomError := make([]error, 10) + + f := faker.NewFaker() + + errorStrings := make([]string, 10) + + for i := 0; i < 10; i++ { + err := errors.New(f.RandomString(10)) + randomError[i] = err + errorStrings[i] = err.Error() + } + + // Check Results + var ebList = ErrorBuilder{} + + for _, err := range randomError { + ebList.AddError(err) + } + + assert.Equal(t, errorStrings, ebList.errs, "ErrorBuilder.AddError() should add an error to the list") +} + +func Test_ErrorBuilder_Respond(t *testing.T) { + t.Parallel() + + f := faker.NewFaker() + + randomError := make([]error, 5) + + for i := 0; i < 5; i++ { + err := errors.New(f.RandomString(5)) + randomError[i] = err + } + + // Check Results + var ebList = ErrorBuilder{} + + for _, err := range randomError { + ebList.AddError(err) + } + + fakeWriter := httptest.NewRecorder() + + ebList.Respond(fakeWriter, 422) + + assert.Equal(t, 422, fakeWriter.Code, "ErrorBuilder.Respond() should return a status code of 422") + + // Check errors payload is correct + + errorsStruct := struct { + Errors []string `json:"details"` + Message string `json:"message"` + Error bool `json:"error"` + }{ + Errors: ebList.errs, + Message: http.StatusText(http.StatusUnprocessableEntity), + Error: true, + } + + asJson, _ := json.Marshal(errorsStruct) + assert.JSONEq(t, string(asJson), fakeWriter.Body.String(), "ErrorBuilder.Respond() should return a JSON response with the errors") + +} diff --git a/backend/pkgs/server/response_test.go b/backend/pkgs/server/response_test.go new file mode 100644 index 0000000..2e98365 --- /dev/null +++ b/backend/pkgs/server/response_test.go @@ -0,0 +1,78 @@ +package server + +import ( + "errors" + "net/http" + "net/http/httptest" + 
"testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Respond_NoContent(t *testing.T) { + recorder := httptest.NewRecorder() + dummystruct := struct { + Name string + }{ + Name: "dummy", + } + + Respond(recorder, http.StatusNoContent, dummystruct) + + assert.Equal(t, http.StatusNoContent, recorder.Code) + assert.Empty(t, recorder.Body.String()) +} + +func Test_Respond_JSON(t *testing.T) { + recorder := httptest.NewRecorder() + dummystruct := struct { + Name string `json:"name"` + }{ + Name: "dummy", + } + + Respond(recorder, http.StatusCreated, dummystruct) + + assert.Equal(t, http.StatusCreated, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"name":"dummy"}`) + assert.Equal(t, "application/json", recorder.Header().Get("Content-Type")) + +} + +func Test_RespondError(t *testing.T) { + recorder := httptest.NewRecorder() + var customError = errors.New("custom error") + + RespondError(recorder, http.StatusBadRequest, customError) + + assert.Equal(t, http.StatusBadRequest, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"details":["custom error"], "message":"Bad Request", "error":true}`) + +} +func Test_RespondInternalServerError(t *testing.T) { + recorder := httptest.NewRecorder() + + RespondInternalServerError(recorder) + + assert.Equal(t, http.StatusInternalServerError, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"details":["internal server error"], "message":"Internal Server Error", "error":true}`) + +} +func Test_RespondUnauthorized(t *testing.T) { + recorder := httptest.NewRecorder() + + RespondUnauthorized(recorder) + + assert.Equal(t, http.StatusUnauthorized, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"details":["unauthorized"], "message":"Unauthorized", "error":true}`) + +} +func Test_RespondForbidden(t *testing.T) { + recorder := httptest.NewRecorder() + + RespondForbidden(recorder) + + assert.Equal(t, http.StatusForbidden, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), 
`{"details":["forbidden"], "message":"Forbidden", "error":true}`) + +} diff --git a/backend/pkgs/server/result.go b/backend/pkgs/server/result.go new file mode 100644 index 0000000..c2340a5 --- /dev/null +++ b/backend/pkgs/server/result.go @@ -0,0 +1,27 @@ +package server + +type Result struct { + Error bool `json:"error,omitempty"` + Details interface{} `json:"details,omitempty"` + Message string `json:"message,omitempty"` + Item interface{} `json:"item,omitempty"` +} + +// Wrap creates a Wrapper instance and adds the initial namespace and data to be returned. +func Wrap(data interface{}) Result { + return Result{ + Item: data, + } +} + +func (r Result) AddMessage(message string) Result { + r.Message = message + return r +} + +func (r Result) AddError(err string, details interface{}) Result { + r.Message = err + r.Details = details + r.Error = true + return r +} diff --git a/backend/pkgs/server/server.go b/backend/pkgs/server/server.go new file mode 100644 index 0000000..628f234 --- /dev/null +++ b/backend/pkgs/server/server.go @@ -0,0 +1,123 @@ +package server + +import ( + "context" + "errors" + "fmt" + "net/http" + "os" + "os/signal" + "sync" + "syscall" + "time" +) + +// TODO: #2 Implement Go routine pool/job queue + +var ErrServerNotStarted = errors.New("server not started") +var ErrServerAlreadyStarted = errors.New("server already started") + +type Server struct { + Host string + Port string + + Worker Worker + wg sync.WaitGroup + + started bool + activeServer *http.Server +} + +func NewServer(host, port string) *Server { + return &Server{ + Host: host, + Port: port, + wg: sync.WaitGroup{}, + Worker: NewSimpleWorker(), + } +} + +func (s *Server) Shutdown(sig string) error { + if !s.started { + return ErrServerNotStarted + } + fmt.Printf("Received %s signal, shutting down\n", sig) + + // Create a context with a 5-second timeout. 
	// Give in-flight requests up to 5 seconds to complete before forcing close.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	err := s.activeServer.Shutdown(ctx)
	// Mark not-started even when Shutdown errored, so Start can run again.
	s.started = false
	if err != nil {
		return err
	}

	fmt.Println("Http server shutdown, waiting for all tasks to finish")
	// Block until every task registered via Background has completed.
	s.wg.Wait()

	return nil

}

// Start configures and runs the HTTP server on Host:Port, blocking until the
// server is shut down. A goroutine installs a SIGINT/SIGTERM handler that
// triggers graceful Shutdown and then calls os.Exit(0). Returns
// ErrServerAlreadyStarted when called while already running.
func (s *Server) Start(router http.Handler) error {
	if s.started {
		return ErrServerAlreadyStarted
	}

	s.activeServer = &http.Server{
		Addr:         s.Host + ":" + s.Port,
		Handler:      router,
		IdleTimeout:  time.Minute,
		ReadTimeout:  10 * time.Second,
		WriteTimeout: 10 * time.Second,
	}

	// Carries a Shutdown failure from the signal goroutine back to Start.
	shutdownError := make(chan error)

	go func() {
		// Create a quit channel which carries os.Signal values.
		quit := make(chan os.Signal, 1)

		// Use signal.Notify() to listen for incoming SIGINT and SIGTERM signals and
		// relay them to the quit channel.
		signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)

		// Read the signal from the quit channel. block until received
		sig := <-quit

		err := s.Shutdown(sig.String())
		if err != nil {
			shutdownError <- err
		}

		// Exit the application with a 0 (success) status code.
		// NOTE(review): on a signal the process exits here, so the code below
		// only runs when Shutdown is invoked programmatically — confirm intended.
		os.Exit(0)
	}()

	s.started = true
	err := s.activeServer.ListenAndServe()

	// ListenAndServe always returns non-nil; ErrServerClosed is the expected
	// result of a graceful Shutdown and is not treated as a failure.
	if !errors.Is(err, http.ErrServerClosed) {
		return err
	}

	// Wait for the shutdown path to report its outcome.
	err = <-shutdownError
	if err != nil {
		return err
	}

	fmt.Println("Server shutdown successfully")

	return nil
}

// Background starts a go routine that runs on the servers pool. In the event of a shutdown
// request, the server will wait until all open goroutines have finished before shutting down.
+func (svr *Server) Background(task func()) { + svr.wg.Add(1) + svr.Worker.Add(func() { + defer svr.wg.Done() + task() + }) +} diff --git a/backend/pkgs/server/server_test.go b/backend/pkgs/server/server_test.go new file mode 100644 index 0000000..18eed9e --- /dev/null +++ b/backend/pkgs/server/server_test.go @@ -0,0 +1,97 @@ +package server + +import ( + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func testServer(t *testing.T, r http.Handler) *Server { + svr := NewServer("127.0.0.1", "19245") + + go func() { + svr.Start(r) + }() + + ping := func() error { + _, err := http.Get("http://127.0.0.1:19245") + return err + } + + for { + if err := ping(); err == nil { + break + } + time.Sleep(time.Millisecond * 100) + } + + return svr +} + +func Test_ServerShutdown_Error(t *testing.T) { + svr := NewServer("127.0.0.1", "19245") + + err := svr.Shutdown("test") + assert.ErrorIs(t, err, ErrServerNotStarted) +} + +func Test_ServerStarts_Error(t *testing.T) { + svr := testServer(t, nil) + + err := svr.Start(nil) + assert.ErrorIs(t, err, ErrServerAlreadyStarted) + + err = svr.Shutdown("test") + assert.NoError(t, err) +} + +func Test_ServerStarts(t *testing.T) { + svr := testServer(t, nil) + err := svr.Shutdown("test") + assert.NoError(t, err) +} + +func Test_GracefulServerShutdownWithWorkers(t *testing.T) { + isFinished := false + + svr := testServer(t, nil) + + svr.Background(func() { + time.Sleep(time.Second * 4) + isFinished = true + }) + + err := svr.Shutdown("test") + + assert.NoError(t, err) + assert.True(t, isFinished) + +} + +func Test_GracefulServerShutdownWithRequests(t *testing.T) { + isFinished := false + + router := http.NewServeMux() + + // add long running handler func + router.HandleFunc("/test", func(rw http.ResponseWriter, r *http.Request) { + time.Sleep(time.Second * 3) + isFinished = true + }) + + svr := testServer(t, router) + + // Make request to "/test" + go func() { + http.Get("http://127.0.0.1:19245/test") // This is 
probably bad? + }() + + time.Sleep(time.Second) // Hack to wait for the request to be made + + err := svr.Shutdown("test") + assert.NoError(t, err) + + assert.True(t, isFinished) +} diff --git a/backend/pkgs/server/worker.go b/backend/pkgs/server/worker.go new file mode 100644 index 0000000..682d5d6 --- /dev/null +++ b/backend/pkgs/server/worker.go @@ -0,0 +1,20 @@ +package server + +type Worker interface { + Add(func()) +} + +// SimpleWorker is a simple background worker that implements +// the Worker interface and runs all tasks in a go routine without +// a pool or que or limits. It's useful for simple or small applications +// with minimal/short background tasks +type SimpleWorker struct { +} + +func NewSimpleWorker() *SimpleWorker { + return &SimpleWorker{} +} + +func (sw *SimpleWorker) Add(task func()) { + go task() +} diff --git a/backend/static/favicon.ico b/backend/static/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..c6f7f740621c2f01f10b5c6614ed1960a7e66244 GIT binary patch literal 1366 zcmZ`(c~DbF9DV{R9n_B1QhLm2I*Jz}NjyS8AhD#7LLLwjfk7mMaDyN@6!8d$RICDm zk`zQDI$kXj2q9FICT2=Uz)DdGhQmNWLKvw?5&|Ue^~LFo(?7cN?f!Ouzwi5Yc6Mh= z!@;1{E?>C-0I-@897sbV+TK5QM8?1y=em&KltQAC0HC>ir8NO%FD3@lr~q($GXMyR z0bmJ93G@KK!vKJR2>?DK0C3B>azA1p0664@htkOifx%$lL7I0M-45^r@F0C}7#$D@ z2=+V!0YWBFBfY`*phhCg4}2<}5c*ylMkmYd}{nJ3hwYCe+=Rd zTLR)!d>L_!%Ii@--Sv-4BC;}gBa1BQW3Z?)Z^+d8H#D*UPcFj!CRl8Mj*dasXP`gk z;a(#worMHCi1X4!zj*EN(`nAoLQemiG}RD=U&tApkGxPvE34yZ%*8sJVF^ZBj}b1= zL0SuJw!t;i5MK+WbPhz6iel=UlRNtPudIBn?d1Y&vB4#i5dC5USWuDqcwDH5OeRzO z!PxDa-Sdv}=a!+WDX72yFQT6>cIS>3iR95cI8`x7&N`MT>lRg4yRG|FsZjhd2{q3` zk>O#R(Cded%h`Q1H6o$vk&;0RcmHO?h{>AWXSl3~%w{v2#q{#ncKT%gTPu8V3L1Fv zg1n!E--T3Y@dTu|p9p%`-MbOJVf)rC>d_IS z4HoI4{BcOP2tPH!vO8kcdF~ljB!#dmbl;we^q7`biRvBvcph$=g-+|BykU#t0$*N! 
zf;YO%thsZd>XN+vlJaKFqg%C-D^AJ2@8J`0T7#V^cOW zm7U2-Pfku}WwKMnO)(Zw?l&^N*u;=- z%*|C664$UZYMm1%-3B!!ZiXH#=Jp*oSD~Qz`<`x(?Y6tbUPF!M{_&xb?^aVQ0%XS8 zeWZpbo}QkKS#QwEK_MY2!p7Rpz20c9v7?CC;73F~L@`k(Jw0vfGPf9J3SITnLsW-S z@G6d{;r{j|`gL!NbgEY-d!}~u=u^vObvmf2d&_P~ex3FHrhN3$A+%p4Muwse;PZ z`<8_iyeX}^1=jm>%uXPxt!)aKmTMCta>mAg4>{&4E{s5Nk-sGWoa3R~hIt}#d+uCv zG?$&gP4Y?1O+o^I!(a)XSez#YPsd?>uo$1+SPu-w2ZNd7Y{>YJAS;`lmYnzhg6f^O Q)QA9}kio!v` { + item: T; +} + +interface Status { + status: string; + message: string; +} + +export interface ApiSummary { + health: boolean; + versions: string[]; + title: string; + message: string; +} + +export interface ApiToken { + token: string; + expiresAt: string; +} + +export interface UserSelf { + id: string; + name: string; + email: string; + isSuperuser: boolean; +} + +export class v1ApiClient { + version: string; + baseUrl: string; + requests: Axios; + + token: string; + expires: Date; + + constructor(baseUrl: string, version = "v1") { + this.version = version; + this.baseUrl = baseUrl; + this.requests = axios.create({ + baseURL: `${this.baseUrl}/${this.version}`, + }); + } + + v1(url: string) { + return `${this.baseUrl}/api/v1${url}`; + } + + api(url: string) { + return `${this.baseUrl}/api${url}`; + } + + setToken(token: string, expires: Date) { + this.token = token; + this.expires = expires; + + this.requests.defaults.headers.common["Authorization"] = token; + } + + async login(username: string, password: string) { + const response = await this.requests.post( + this.v1("/users/login"), + { + username, + password, + } + ); + + this.setToken(response.data.token, new Date(response.data.expiresAt)); + + return response; + } + + async logout() { + const response = await this.requests.post(this.v1("/users/logout")); + + if (response.status === 200) { + this.setToken("", new Date()); + } + + return response; + } + + async self() { + return this.requests.get>(this.v1("/users/self")); + } + + async status() { + return 
this.requests.get>(this.api("/status")); + } +} diff --git a/client/package-lock.json b/client/package-lock.json new file mode 100644 index 0000000..a5b40df --- /dev/null +++ b/client/package-lock.json @@ -0,0 +1,3024 @@ +{ + "name": "client", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "dependencies": { + "axios": "^0.25.0" + }, + "devDependencies": { + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.0", + "@types/node": "^17.0.14", + "typescript": "^4.5.5", + "vitest": "^0.2.5" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": 
true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": 
{ + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@jest/types": { + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@types/chai": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==", + "dev": true + }, + "node_modules/@types/chai-subset": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", + "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "dev": true, + "dependencies": { + "@types/chai": "*" + } + }, + "node_modules/@types/concat-stream": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@types/concat-stream/-/concat-stream-1.6.1.tgz", + "integrity": "sha512-eHE4cQPoj6ngxBZMvVf6Hw7Mh4jMW4U9lpGmS5GBPB9RYxlFg+CHaVN7ErNY4W9XfLIEn20b4VDYaIrbq0q4uA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/expect": { + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/expect/-/expect-24.3.0.tgz", + "integrity": "sha512-aq5Z+YFBz5o2b6Sp1jigx5nsmoZMK5Ceurjwy6PZmRv7dEi1jLtkARfvB1ME+OXJUG+7TZUDcv3WoCr/aor6dQ==", + "deprecated": "This is a stub types definition. 
expect provides its own type definitions, so you do not need this installed.", + "dev": true, + "dependencies": { + "expect": "*" + } + }, + "node_modules/@types/form-data": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-0.0.33.tgz", + "integrity": "sha1-yayFsqX9GENbjIXZ7LUObWyJP/g=", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/mocha": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.0.tgz", + "integrity": "sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg==", + "dev": true + }, + "node_modules/@types/node": { + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", + "dev": true + }, 
+ "node_modules/@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "20.2.1", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-20.2.1.tgz", + "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", + "dev": true + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "dependencies": { + "follow-redirects": "^1.14.7" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": 
"^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "engines": [ + "node >= 0.8" + ], + "optional": true, + "peer": true, + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": 
true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/diff-sequences": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.4.0.tgz", + "integrity": "sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/esbuild": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.13.15.tgz", + "integrity": "sha512-raCxt02HBKv8RJxE8vkTSCXGIyKHdEdGfUmiYb8wnabnaEmHzyW7DCHb5tEN0xU8ryqg5xw54mcwnYkC4x3AIw==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "optionalDependencies": { + "esbuild-android-arm64": "0.13.15", + "esbuild-darwin-64": "0.13.15", + "esbuild-darwin-arm64": "0.13.15", + "esbuild-freebsd-64": "0.13.15", + "esbuild-freebsd-arm64": "0.13.15", + "esbuild-linux-32": "0.13.15", + "esbuild-linux-64": "0.13.15", + "esbuild-linux-arm": "0.13.15", + "esbuild-linux-arm64": "0.13.15", + "esbuild-linux-mips64le": "0.13.15", + "esbuild-linux-ppc64le": "0.13.15", + "esbuild-netbsd-64": "0.13.15", + "esbuild-openbsd-64": "0.13.15", + "esbuild-sunos-64": "0.13.15", + "esbuild-windows-32": "0.13.15", + "esbuild-windows-64": "0.13.15", + "esbuild-windows-arm64": "0.13.15" + } + }, + "node_modules/esbuild-android-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.13.15.tgz", + "integrity": "sha512-m602nft/XXeO8YQPUDVoHfjyRVPdPgjyyXOxZ44MK/agewFFkPa8tUo6lAzSWh5Ui5PB4KR9UIFTSBKh/RrCmg==", + 
"cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/esbuild-darwin-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.13.15.tgz", + "integrity": "sha512-ihOQRGs2yyp7t5bArCwnvn2Atr6X4axqPpEdCFPVp7iUj4cVSdisgvEKdNR7yH3JDjW6aQDw40iQFoTqejqxvQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/esbuild-darwin-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.13.15.tgz", + "integrity": "sha512-i1FZssTVxUqNlJ6cBTj5YQj4imWy3m49RZRnHhLpefFIh0To05ow9DTrXROTE1urGTQCloFUXTX8QfGJy1P8dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/esbuild-freebsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.13.15.tgz", + "integrity": "sha512-G3dLBXUI6lC6Z09/x+WtXBXbOYQZ0E8TDBqvn7aMaOCzryJs8LyVXKY4CPnHFXZAbSwkCbqiPuSQ1+HhrNk7EA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/esbuild-freebsd-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.13.15.tgz", + "integrity": "sha512-KJx0fzEDf1uhNOZQStV4ujg30WlnwqUASaGSFPhznLM/bbheu9HhqZ6mJJZM32lkyfGJikw0jg7v3S0oAvtvQQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/esbuild-linux-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.13.15.tgz", + "integrity": "sha512-ZvTBPk0YWCLMCXiFmD5EUtB30zIPvC5Itxz0mdTu/xZBbbHJftQgLWY49wEPSn2T/TxahYCRDWun5smRa0Tu+g==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-64": { + "version": "0.13.15", + "resolved": 
"https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.13.15.tgz", + "integrity": "sha512-eCKzkNSLywNeQTRBxJRQ0jxRCl2YWdMB3+PkWFo2BBQYC5mISLIVIjThNtn6HUNqua1pnvgP5xX0nHbZbPj5oA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-arm": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.13.15.tgz", + "integrity": "sha512-wUHttDi/ol0tD8ZgUMDH8Ef7IbDX+/UsWJOXaAyTdkT7Yy9ZBqPg8bgB/Dn3CZ9SBpNieozrPRHm0BGww7W/jA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.13.15.tgz", + "integrity": "sha512-bYpuUlN6qYU9slzr/ltyLTR9YTBS7qUDymO8SV7kjeNext61OdmqFAzuVZom+OLW1HPHseBfJ/JfdSlx8oTUoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-mips64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.13.15.tgz", + "integrity": "sha512-KlVjIG828uFPyJkO/8gKwy9RbXhCEUeFsCGOJBepUlpa7G8/SeZgncUEz/tOOUJTcWMTmFMtdd3GElGyAtbSWg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-ppc64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.13.15.tgz", + "integrity": "sha512-h6gYF+OsaqEuBjeesTBtUPw0bmiDu7eAeuc2OEH9S6mV9/jPhPdhOWzdeshb0BskRZxPhxPOjqZ+/OqLcxQwEQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-netbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.13.15.tgz", + "integrity": 
"sha512-3+yE9emwoevLMyvu+iR3rsa+Xwhie7ZEHMGDQ6dkqP/ndFzRHkobHUKTe+NCApSqG5ce2z4rFu+NX/UHnxlh3w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ] + }, + "node_modules/esbuild-openbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.13.15.tgz", + "integrity": "sha512-wTfvtwYJYAFL1fSs8yHIdf5GEE4NkbtbXtjLWjM3Cw8mmQKqsg8kTiqJ9NJQe5NX/5Qlo7Xd9r1yKMMkHllp5g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/esbuild-sunos-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.13.15.tgz", + "integrity": "sha512-lbivT9Bx3t1iWWrSnGyBP9ODriEvWDRiweAs69vI+miJoeKwHWOComSRukttbuzjZ8r1q0mQJ8Z7yUsDJ3hKdw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ] + }, + "node_modules/esbuild-windows-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.13.15.tgz", + "integrity": "sha512-fDMEf2g3SsJ599MBr50cY5ve5lP1wyVwTe6aLJsM01KtxyKkB4UT+fc5MXQFn3RLrAIAZOG+tHC+yXObpSn7Nw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/esbuild-windows-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.13.15.tgz", + "integrity": "sha512-9aMsPRGDWCd3bGjUIKG/ZOJPKsiztlxl/Q3C1XDswO6eNX/Jtwu4M+jb6YDH9hRSUflQWX0XKAfWzgy5Wk54JQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/esbuild-windows-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.13.15.tgz", + "integrity": "sha512-zzvyCVVpbwQQATaf3IG8mu1IwGEiDxKkYUdA4FpoCHi1KtPa13jeScYDjlW0Qh+ebWzpKfR2ZwvqAQkSWNcKjA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + 
}, + "node_modules/expect": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", + "integrity": "sha512-1M/0kAALIaj5LaG66sFJTbRsWTADnylly82cu4bspI0nl+pgP4E6Bh/aqdHlTUjul06K7xQnnrAoqfxVU0+/ag==", + "dev": true, + "dependencies": { + "@jest/types": "^27.4.2", + "jest-get-type": "^27.4.0", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/follow-redirects": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz", + "integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": 
"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-port": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-3.2.0.tgz", + "integrity": "sha1-3Xzn3hh8Bsi/NTeWrHHgmfCYDrw=", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "node_modules/happy-dom": { + "version": "2.31.1", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-2.31.1.tgz", + "integrity": 
"sha512-hbTLxMqyluLT06nRN4TDGLjjKni73tZlvLdF6qGfdv5U4EnrSYSwcZK3ESmv0LEEa5St7NY7e62rhISotH8O3Q==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "he": "^1.1.1", + "node-fetch": "^2.6.1", + "sync-request": "^6.1.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0" + } + }, + "node_modules/happy-dom/node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": 
true, + "optional": true, + "peer": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/http-basic": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/http-basic/-/http-basic-8.1.3.tgz", + "integrity": "sha512-/EcDMwJZh3mABI2NhGfHOGOeOZITqfkEO4p/xK+l3NpyncIHUQBoMvCSF/b5GqvKtySC2srL/GGG3+EtlqlmCw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "caseless": "^0.12.0", + "concat-stream": "^1.6.2", + "http-response-object": "^3.0.1", + "parse-cache-control": "^1.0.1" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/http-response-object": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/http-response-object/-/http-response-object-3.0.2.tgz", + "integrity": "sha512-bqX0XTF6fnXSQcEJ2Iuyr75yVakyjIDCqroJQ/aHfSdlM743Cwqoi2nDYMzLGWUcuTWGWy8AAvOKXTfiv6q9RA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "^10.0.3" + } + }, + "node_modules/http-response-object/node_modules/@types/node": { + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "optional": true, + "peer": true + }, + 
"node_modules/is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/jest-diff": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.4.6.tgz", + "integrity": "sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.4.0", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.4.0.tgz", + "integrity": "sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.4.6.tgz", + "integrity": 
"sha512-XD4PKT3Wn1LQnRAq7ZsTI0VRuEc9OrCPFiO1XL7bftTGmfNF0DcEwMHRgqiu7NGf8ZoZDREpGrCniDkjt79WbA==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.4.6", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.4.6.tgz", + "integrity": "sha512-0p5szriFU0U74czRSFjH6RyS7UYIAkn/ntwMuOwTGWrQIOh5NzXXrq72LOqIkJKKvFbPq+byZKuBz78fjBERBA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.4.2", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.4", + "pretty-format": "^27.4.6", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/local-pkg": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.1.tgz", + "integrity": "sha512-lL87ytIGP2FU5PWwNDo0w3WhIo2gopIAxPg9RxDYF7m4rr5ahuZxP22xnJHIvaLTe4Z9P6uKKY2UHiwyB4pcrw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/loupe": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.2.tgz", + "integrity": "sha512-QgVamnvj0jX1LMPlCAq0MK6hATORFtGqHoUKXTkwNe13BqlN6aePQCKnnTcFvdDYEEITcJ+gBl4mTW7YJtJbyQ==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.0" + } + }, + "node_modules/micromatch": { + "version": "4.0.4", + "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "mime-db": "1.51.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/nanoid": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", + "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/object-inspect": { + "version": "1.12.0", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "dev": true, + "optional": true, + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/parse-cache-control": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-cache-control/-/parse-cache-control-1.0.1.tgz", + "integrity": "sha1-juqz5U+laSD+Fro493+iGqzC104=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.4.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.6.tgz", + "integrity": "sha512-OovjwIzs9Te46vlEx7+uXB0PLijpwjXGKXjVGGPIGubGpq7uh5Xgf6D6FiJ/SzJMBosHDp6a2hiXOS97iBXcaA==", + 
"dev": true, + "dependencies": { + "nanoid": "^3.2.0", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + } + }, + "node_modules/pretty-format": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", + "integrity": "sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/promise": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-8.1.0.tgz", + "integrity": "sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "asap": "~2.0.6" + } + }, + "node_modules/qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": 
"sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, + "node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rollup": { + "version": "2.67.0", + "resolved": 
"https://registry.npmjs.org/rollup/-/rollup-2.67.0.tgz", + "integrity": "sha512-W83AaERwvDiHwHEF/dfAfS3z1Be5wf7n+pO3ZAO5IQadCT2lBTr7WQ2MwZZe+nodbD+n3HtC4OCOAdsOPPcKZQ==", + "dev": true, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=10.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stack-utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", + "dev": true, + "dependencies": 
{ + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sync-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/sync-request/-/sync-request-6.1.0.tgz", + "integrity": "sha512-8fjNkrNlNCrVc/av+Jn+xxqfCjYaBoHqCsDz6mt030UMxJGr+GSfCV1dQt2gRtlL63+VPidwDVLr7V2OcTSdRw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "http-response-object": "^3.0.1", + "sync-rpc": "^1.2.1", + "then-request": "^6.0.0" + }, + "engines": { + 
"node": ">=8.0.0" + } + }, + "node_modules/sync-rpc": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/sync-rpc/-/sync-rpc-1.3.6.tgz", + "integrity": "sha512-J8jTXuZzRlvU7HemDgHi3pGnh/rkoqR/OZSjhTyyZrEkkYQbk7Z33AXp37mkPfPpfdOuj7Ex3H/TJM1z48uPQw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "get-port": "^3.1.0" + } + }, + "node_modules/then-request": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/then-request/-/then-request-6.0.2.tgz", + "integrity": "sha512-3ZBiG7JvP3wbDzA9iNY5zJQcHL4jn/0BWtXIkagfz7QgOL/LqjCEOBQuJNZfu0XYnv5JhKh+cDxCPM4ILrqruA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/concat-stream": "^1.6.0", + "@types/form-data": "0.0.33", + "@types/node": "^8.0.0", + "@types/qs": "^6.2.31", + "caseless": "~0.12.0", + "concat-stream": "^1.6.0", + "form-data": "^2.2.0", + "http-basic": "^8.1.1", + "http-response-object": "^3.0.1", + "promise": "^8.0.0", + "qs": "^6.4.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/then-request/node_modules/@types/node": { + "version": "8.10.66", + "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", + "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/tinypool": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.1.1.tgz", + "integrity": "sha512-sW2fQZ2BRb/GX5v55NkHiTrbMLx0eX0xNpP+VGhOe2f7Oo04+LeClDyM19zCE/WCy7jJ8kzIJ0Ojrxj3UhN9Sg==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-0.2.10.tgz", + "integrity": "sha512-Qij6rGWCDjWIejxCXXVi6bNgvrYBp3PbqC4cBP/0fD6WHDOHCw09Zd13CsxrDqSR5PFq01WeqDws8t5lz5sH0A==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/to-regex-range": { 
+ "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/typescript": { + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.5.tgz", + "integrity": "sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/vite": { + "version": "2.7.13", + "resolved": "https://registry.npmjs.org/vite/-/vite-2.7.13.tgz", + "integrity": "sha512-Mq8et7f3aK0SgSxjDNfOAimZGW9XryfHRa/uV0jseQSilg+KhYDSoNb9h1rknOy6SuMkvNDLKCYAYYUMCE+IgQ==", + "dev": true, + "dependencies": { + "esbuild": "^0.13.12", + 
"postcss": "^8.4.5", + "resolve": "^1.20.0", + "rollup": "^2.59.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": ">=12.2.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "peerDependencies": { + "less": "*", + "sass": "*", + "stylus": "*" + }, + "peerDependenciesMeta": { + "less": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.2.5.tgz", + "integrity": "sha512-QruEhsNxy8ycLxYG9rrGUfHZzJ8A6YvA9ULZ4w/ecvm0Zejm1nxUar/XkRWkL2xzrqA5AjmfqDSQZ8q2bFbA0Q==", + "dev": true, + "dependencies": { + "@types/chai": "^4.3.0", + "@types/chai-subset": "^1.3.3", + "chai": "^4.3.6", + "local-pkg": "^0.4.1", + "tinypool": "^0.1.1", + "tinyspy": "^0.2.10", + "vite": ">=2.7.13" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": ">=14.14.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vitest/ui": "*", + "c8": "*", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@vitest/ui": { + "optional": true + }, + "c8": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "iconv-lite": "0.4.24" + } + }, + "node_modules/whatwg-mimetype": { + "version": "2.3.0", 
+ "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + } + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "requires": { + "@babel/highlight": "^7.16.7" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true + }, + "@babel/highlight": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "@jest/types": { + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", + "dev": true, + "requires": { + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + "chalk": "^4.0.0" + } + }, + "@types/chai": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==", + "dev": true + }, + "@types/chai-subset": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", + "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "dev": true, + "requires": { + "@types/chai": "*" + } + }, + "@types/concat-stream": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@types/concat-stream/-/concat-stream-1.6.1.tgz", + "integrity": "sha512-eHE4cQPoj6ngxBZMvVf6Hw7Mh4jMW4U9lpGmS5GBPB9RYxlFg+CHaVN7ErNY4W9XfLIEn20b4VDYaIrbq0q4uA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/node": "*" + } + }, + "@types/expect": { + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/expect/-/expect-24.3.0.tgz", + "integrity": "sha512-aq5Z+YFBz5o2b6Sp1jigx5nsmoZMK5Ceurjwy6PZmRv7dEi1jLtkARfvB1ME+OXJUG+7TZUDcv3WoCr/aor6dQ==", + "dev": true, + "requires": { + "expect": "*" + } + }, + "@types/form-data": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-0.0.33.tgz", + "integrity": "sha1-yayFsqX9GENbjIXZ7LUObWyJP/g=", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/node": "*" + } + }, + "@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + 
"dev": true, + "requires": { + "@types/istanbul-lib-coverage": "*" + } + }, + "@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "requires": { + "@types/istanbul-lib-report": "*" + } + }, + "@types/mocha": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.0.tgz", + "integrity": "sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg==", + "dev": true + }, + "@types/node": { + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", + "dev": true + }, + "@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==", + "dev": true, + "optional": true, + "peer": true + }, + "@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "@types/yargs": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } + }, + "@types/yargs-parser": { + "version": "20.2.1", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-20.2.1.tgz", + "integrity": 
"sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=", + "dev": true, + "optional": true, + "peer": true + }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true, + "optional": true, + "peer": true + }, + "axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "requires": { + "follow-redirects": "^1.14.7" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "buffer-from": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "optional": true, + "peer": true + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true, + "optional": true, + "peer": true + }, + "chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "check-error": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true, + "optional": true, + "peer": true + }, + "deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": 
true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true, + "optional": true, + "peer": true + }, + "diff-sequences": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.4.0.tgz", + "integrity": "sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==", + "dev": true + }, + "esbuild": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.13.15.tgz", + "integrity": "sha512-raCxt02HBKv8RJxE8vkTSCXGIyKHdEdGfUmiYb8wnabnaEmHzyW7DCHb5tEN0xU8ryqg5xw54mcwnYkC4x3AIw==", + "dev": true, + "requires": { + "esbuild-android-arm64": "0.13.15", + "esbuild-darwin-64": "0.13.15", + "esbuild-darwin-arm64": "0.13.15", + "esbuild-freebsd-64": "0.13.15", + "esbuild-freebsd-arm64": "0.13.15", + "esbuild-linux-32": "0.13.15", + "esbuild-linux-64": "0.13.15", + "esbuild-linux-arm": "0.13.15", + "esbuild-linux-arm64": "0.13.15", + "esbuild-linux-mips64le": "0.13.15", + "esbuild-linux-ppc64le": "0.13.15", + "esbuild-netbsd-64": "0.13.15", + "esbuild-openbsd-64": "0.13.15", + "esbuild-sunos-64": "0.13.15", + "esbuild-windows-32": "0.13.15", + "esbuild-windows-64": "0.13.15", + "esbuild-windows-arm64": "0.13.15" + } + }, + "esbuild-android-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.13.15.tgz", + "integrity": "sha512-m602nft/XXeO8YQPUDVoHfjyRVPdPgjyyXOxZ44MK/agewFFkPa8tUo6lAzSWh5Ui5PB4KR9UIFTSBKh/RrCmg==", + "dev": true, + "optional": true + }, + "esbuild-darwin-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.13.15.tgz", + "integrity": "sha512-ihOQRGs2yyp7t5bArCwnvn2Atr6X4axqPpEdCFPVp7iUj4cVSdisgvEKdNR7yH3JDjW6aQDw40iQFoTqejqxvQ==", + 
"dev": true, + "optional": true + }, + "esbuild-darwin-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.13.15.tgz", + "integrity": "sha512-i1FZssTVxUqNlJ6cBTj5YQj4imWy3m49RZRnHhLpefFIh0To05ow9DTrXROTE1urGTQCloFUXTX8QfGJy1P8dQ==", + "dev": true, + "optional": true + }, + "esbuild-freebsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.13.15.tgz", + "integrity": "sha512-G3dLBXUI6lC6Z09/x+WtXBXbOYQZ0E8TDBqvn7aMaOCzryJs8LyVXKY4CPnHFXZAbSwkCbqiPuSQ1+HhrNk7EA==", + "dev": true, + "optional": true + }, + "esbuild-freebsd-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.13.15.tgz", + "integrity": "sha512-KJx0fzEDf1uhNOZQStV4ujg30WlnwqUASaGSFPhznLM/bbheu9HhqZ6mJJZM32lkyfGJikw0jg7v3S0oAvtvQQ==", + "dev": true, + "optional": true + }, + "esbuild-linux-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.13.15.tgz", + "integrity": "sha512-ZvTBPk0YWCLMCXiFmD5EUtB30zIPvC5Itxz0mdTu/xZBbbHJftQgLWY49wEPSn2T/TxahYCRDWun5smRa0Tu+g==", + "dev": true, + "optional": true + }, + "esbuild-linux-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.13.15.tgz", + "integrity": "sha512-eCKzkNSLywNeQTRBxJRQ0jxRCl2YWdMB3+PkWFo2BBQYC5mISLIVIjThNtn6HUNqua1pnvgP5xX0nHbZbPj5oA==", + "dev": true, + "optional": true + }, + "esbuild-linux-arm": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.13.15.tgz", + "integrity": "sha512-wUHttDi/ol0tD8ZgUMDH8Ef7IbDX+/UsWJOXaAyTdkT7Yy9ZBqPg8bgB/Dn3CZ9SBpNieozrPRHm0BGww7W/jA==", + "dev": true, + "optional": true + }, + "esbuild-linux-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.13.15.tgz", + "integrity": 
"sha512-bYpuUlN6qYU9slzr/ltyLTR9YTBS7qUDymO8SV7kjeNext61OdmqFAzuVZom+OLW1HPHseBfJ/JfdSlx8oTUoA==", + "dev": true, + "optional": true + }, + "esbuild-linux-mips64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.13.15.tgz", + "integrity": "sha512-KlVjIG828uFPyJkO/8gKwy9RbXhCEUeFsCGOJBepUlpa7G8/SeZgncUEz/tOOUJTcWMTmFMtdd3GElGyAtbSWg==", + "dev": true, + "optional": true + }, + "esbuild-linux-ppc64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.13.15.tgz", + "integrity": "sha512-h6gYF+OsaqEuBjeesTBtUPw0bmiDu7eAeuc2OEH9S6mV9/jPhPdhOWzdeshb0BskRZxPhxPOjqZ+/OqLcxQwEQ==", + "dev": true, + "optional": true + }, + "esbuild-netbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.13.15.tgz", + "integrity": "sha512-3+yE9emwoevLMyvu+iR3rsa+Xwhie7ZEHMGDQ6dkqP/ndFzRHkobHUKTe+NCApSqG5ce2z4rFu+NX/UHnxlh3w==", + "dev": true, + "optional": true + }, + "esbuild-openbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.13.15.tgz", + "integrity": "sha512-wTfvtwYJYAFL1fSs8yHIdf5GEE4NkbtbXtjLWjM3Cw8mmQKqsg8kTiqJ9NJQe5NX/5Qlo7Xd9r1yKMMkHllp5g==", + "dev": true, + "optional": true + }, + "esbuild-sunos-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.13.15.tgz", + "integrity": "sha512-lbivT9Bx3t1iWWrSnGyBP9ODriEvWDRiweAs69vI+miJoeKwHWOComSRukttbuzjZ8r1q0mQJ8Z7yUsDJ3hKdw==", + "dev": true, + "optional": true + }, + "esbuild-windows-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.13.15.tgz", + "integrity": "sha512-fDMEf2g3SsJ599MBr50cY5ve5lP1wyVwTe6aLJsM01KtxyKkB4UT+fc5MXQFn3RLrAIAZOG+tHC+yXObpSn7Nw==", + "dev": true, + "optional": true + }, + "esbuild-windows-64": { + "version": "0.13.15", + 
"resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.13.15.tgz", + "integrity": "sha512-9aMsPRGDWCd3bGjUIKG/ZOJPKsiztlxl/Q3C1XDswO6eNX/Jtwu4M+jb6YDH9hRSUflQWX0XKAfWzgy5Wk54JQ==", + "dev": true, + "optional": true + }, + "esbuild-windows-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.13.15.tgz", + "integrity": "sha512-zzvyCVVpbwQQATaf3IG8mu1IwGEiDxKkYUdA4FpoCHi1KtPa13jeScYDjlW0Qh+ebWzpKfR2ZwvqAQkSWNcKjA==", + "dev": true, + "optional": true + }, + "expect": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", + "integrity": "sha512-1M/0kAALIaj5LaG66sFJTbRsWTADnylly82cu4bspI0nl+pgP4E6Bh/aqdHlTUjul06K7xQnnrAoqfxVU0+/ag==", + "dev": true, + "requires": { + "@jest/types": "^27.4.2", + "jest-get-type": "^27.4.0", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "follow-redirects": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz", + "integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==" + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + 
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, + "get-port": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-3.2.0.tgz", + "integrity": "sha1-3Xzn3hh8Bsi/NTeWrHHgmfCYDrw=", + "dev": true, + "optional": true, + "peer": true + }, + "graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "happy-dom": { + "version": "2.31.1", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-2.31.1.tgz", + "integrity": "sha512-hbTLxMqyluLT06nRN4TDGLjjKni73tZlvLdF6qGfdv5U4EnrSYSwcZK3ESmv0LEEa5St7NY7e62rhISotH8O3Q==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "he": "^1.1.1", + "node-fetch": "^2.6.1", + "sync-request": "^6.1.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0" + }, + "dependencies": { + "webidl-conversions": { + 
"version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true, + "optional": true, + "peer": true + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "optional": true, + "peer": true + }, + "http-basic": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/http-basic/-/http-basic-8.1.3.tgz", + "integrity": "sha512-/EcDMwJZh3mABI2NhGfHOGOeOZITqfkEO4p/xK+l3NpyncIHUQBoMvCSF/b5GqvKtySC2srL/GGG3+EtlqlmCw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "caseless": "^0.12.0", + "concat-stream": "^1.6.2", + "http-response-object": "^3.0.1", + "parse-cache-control": "^1.0.1" + } + }, + "http-response-object": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/http-response-object/-/http-response-object-3.0.2.tgz", + "integrity": 
"sha512-bqX0XTF6fnXSQcEJ2Iuyr75yVakyjIDCqroJQ/aHfSdlM743Cwqoi2nDYMzLGWUcuTWGWy8AAvOKXTfiv6q9RA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/node": "^10.0.3" + }, + "dependencies": { + "@types/node": { + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "optional": true, + "peer": true + }, + "is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true, + "optional": true, + "peer": true + }, + "jest-diff": { + "version": "27.4.6", + "resolved": 
"https://registry.npmjs.org/jest-diff/-/jest-diff-27.4.6.tgz", + "integrity": "sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "diff-sequences": "^27.4.0", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + } + }, + "jest-get-type": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.4.0.tgz", + "integrity": "sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==", + "dev": true + }, + "jest-matcher-utils": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.4.6.tgz", + "integrity": "sha512-XD4PKT3Wn1LQnRAq7ZsTI0VRuEc9OrCPFiO1XL7bftTGmfNF0DcEwMHRgqiu7NGf8ZoZDREpGrCniDkjt79WbA==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "jest-diff": "^27.4.6", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + } + }, + "jest-message-util": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.4.6.tgz", + "integrity": "sha512-0p5szriFU0U74czRSFjH6RyS7UYIAkn/ntwMuOwTGWrQIOh5NzXXrq72LOqIkJKKvFbPq+byZKuBz78fjBERBA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.4.2", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.4", + "pretty-format": "^27.4.6", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "local-pkg": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.1.tgz", + "integrity": 
"sha512-lL87ytIGP2FU5PWwNDo0w3WhIo2gopIAxPg9RxDYF7m4rr5ahuZxP22xnJHIvaLTe4Z9P6uKKY2UHiwyB4pcrw==", + "dev": true + }, + "loupe": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.2.tgz", + "integrity": "sha512-QgVamnvj0jX1LMPlCAq0MK6hATORFtGqHoUKXTkwNe13BqlN6aePQCKnnTcFvdDYEEITcJ+gBl4mTW7YJtJbyQ==", + "dev": true, + "requires": { + "get-func-name": "^2.0.0" + } + }, + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + } + }, + "mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "dev": true, + "optional": true, + "peer": true + }, + "mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "mime-db": "1.51.0" + } + }, + "nanoid": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", + "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", + "dev": true + }, + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "object-inspect": { + "version": "1.12.0", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "dev": true, + "optional": true, + "peer": true + }, + "parse-cache-control": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-cache-control/-/parse-cache-control-1.0.1.tgz", + "integrity": "sha1-juqz5U+laSD+Fro493+iGqzC104=", + "dev": true, + "optional": true, + "peer": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "postcss": { + "version": "8.4.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.6.tgz", + "integrity": "sha512-OovjwIzs9Te46vlEx7+uXB0PLijpwjXGKXjVGGPIGubGpq7uh5Xgf6D6FiJ/SzJMBosHDp6a2hiXOS97iBXcaA==", + "dev": true, + "requires": { + "nanoid": "^3.2.0", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + }, + "pretty-format": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", + "integrity": 
"sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true, + "optional": true, + "peer": true + }, + "promise": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-8.1.0.tgz", + "integrity": "sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "asap": "~2.0.6" + } + }, + "qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "side-channel": "^1.0.4" + } + }, + "react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "peer": true, + 
"requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "rollup": { + "version": "2.67.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.67.0.tgz", + "integrity": "sha512-W83AaERwvDiHwHEF/dfAfS3z1Be5wf7n+pO3ZAO5IQadCT2lBTr7WQ2MwZZe+nodbD+n3HtC4OCOAdsOPPcKZQ==", + "dev": true, + "requires": { + "fsevents": "~2.3.2" + } + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true, + "peer": true + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": 
"sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true + }, + "stack-utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", + "dev": true, + "requires": { + "escape-string-regexp": "^2.0.0" + }, + "dependencies": { + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true + } + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "safe-buffer": "~5.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "sync-request": { + "version": 
"6.1.0", + "resolved": "https://registry.npmjs.org/sync-request/-/sync-request-6.1.0.tgz", + "integrity": "sha512-8fjNkrNlNCrVc/av+Jn+xxqfCjYaBoHqCsDz6mt030UMxJGr+GSfCV1dQt2gRtlL63+VPidwDVLr7V2OcTSdRw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "http-response-object": "^3.0.1", + "sync-rpc": "^1.2.1", + "then-request": "^6.0.0" + } + }, + "sync-rpc": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/sync-rpc/-/sync-rpc-1.3.6.tgz", + "integrity": "sha512-J8jTXuZzRlvU7HemDgHi3pGnh/rkoqR/OZSjhTyyZrEkkYQbk7Z33AXp37mkPfPpfdOuj7Ex3H/TJM1z48uPQw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "get-port": "^3.1.0" + } + }, + "then-request": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/then-request/-/then-request-6.0.2.tgz", + "integrity": "sha512-3ZBiG7JvP3wbDzA9iNY5zJQcHL4jn/0BWtXIkagfz7QgOL/LqjCEOBQuJNZfu0XYnv5JhKh+cDxCPM4ILrqruA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/concat-stream": "^1.6.0", + "@types/form-data": "0.0.33", + "@types/node": "^8.0.0", + "@types/qs": "^6.2.31", + "caseless": "~0.12.0", + "concat-stream": "^1.6.0", + "form-data": "^2.2.0", + "http-basic": "^8.1.1", + "http-response-object": "^3.0.1", + "promise": "^8.0.0", + "qs": "^6.4.0" + }, + "dependencies": { + "@types/node": { + "version": "8.10.66", + "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", + "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "tinypool": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.1.1.tgz", + "integrity": "sha512-sW2fQZ2BRb/GX5v55NkHiTrbMLx0eX0xNpP+VGhOe2f7Oo04+LeClDyM19zCE/WCy7jJ8kzIJ0Ojrxj3UhN9Sg==", + "dev": true + }, + "tinyspy": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-0.2.10.tgz", + "integrity": 
"sha512-Qij6rGWCDjWIejxCXXVi6bNgvrYBp3PbqC4cBP/0fD6WHDOHCw09Zd13CsxrDqSR5PFq01WeqDws8t5lz5sH0A==", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true, + "optional": true, + "peer": true + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true, + "optional": true, + "peer": true + }, + "typescript": { + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.5.tgz", + "integrity": "sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==", + "dev": true + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true, + "optional": true, + "peer": true + }, + "vite": { + "version": "2.7.13", + "resolved": "https://registry.npmjs.org/vite/-/vite-2.7.13.tgz", + "integrity": "sha512-Mq8et7f3aK0SgSxjDNfOAimZGW9XryfHRa/uV0jseQSilg+KhYDSoNb9h1rknOy6SuMkvNDLKCYAYYUMCE+IgQ==", + "dev": true, + "requires": { + "esbuild": "^0.13.12", + "fsevents": "~2.3.2", + "postcss": "^8.4.5", + "resolve": "^1.20.0", + "rollup": "^2.59.0" + } + }, + "vitest": { + 
"version": "0.2.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.2.5.tgz", + "integrity": "sha512-QruEhsNxy8ycLxYG9rrGUfHZzJ8A6YvA9ULZ4w/ecvm0Zejm1nxUar/XkRWkL2xzrqA5AjmfqDSQZ8q2bFbA0Q==", + "dev": true, + "requires": { + "@types/chai": "^4.3.0", + "@types/chai-subset": "^1.3.3", + "chai": "^4.3.6", + "local-pkg": "^0.4.1", + "tinypool": "^0.1.1", + "tinyspy": "^0.2.10", + "vite": ">=2.7.13" + } + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true, + "optional": true, + "peer": true + }, + "whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "iconv-lite": "0.4.24" + } + }, + "whatwg-mimetype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true, + "optional": true, + "peer": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + } + } +} diff --git a/client/package.json b/client/package.json new file mode 100644 index 0000000..f0e428e --- /dev/null +++ b/client/package.json @@ -0,0 +1,17 @@ +{ + "devDependencies": { + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.0", + "@types/node": "^17.0.14", + "typescript": "^4.5.5", + "vitest": "^0.2.5" + }, + "scripts": { + "test:ci": 
"TEST_SHUTDOWN_API_SERVER=true vitest --run --config ./test/vitest.config.ts", + "test:local": "TEST_SHUTDOWN_API_SERVER=false && vitest --run --config ./test/vitest.config.ts", + "test:watch": " TEST_SHUTDOWN_API_SERVER=false vitest --config ./test/vitest.config.ts" + }, + "dependencies": { + "axios": "^0.25.0" + } +} \ No newline at end of file diff --git a/client/test/base/base.test.ts b/client/test/base/base.test.ts new file mode 100644 index 0000000..c7bf610 --- /dev/null +++ b/client/test/base/base.test.ts @@ -0,0 +1,26 @@ +import { getClientV1 } from "../../client"; +import { describe, it, expect } from "vitest"; +import * as config from "../config"; + +const client = getClientV1(config.BASE_URL); + +describe("GET /api/status", function () { + it("server is available", async function (done) { + try { + const res = await client.status(); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + expect(res.data.item).toEqual({ + health: true, + versions: ["v1"], + title: "Go API Template", + message: "Welcome to the Go API Template Application!", + }); + + done(); + } catch (err) { + done(err); + } + }); +}); diff --git a/client/test/config.ts b/client/test/config.ts new file mode 100644 index 0000000..d8db927 --- /dev/null +++ b/client/test/config.ts @@ -0,0 +1,4 @@ +export const PORT = "7745"; +export const HOST = "http://127.0.0.1"; +export const BASE_URL = HOST + ":" + PORT; + diff --git a/client/test/setup.ts b/client/test/setup.ts new file mode 100644 index 0000000..2315637 --- /dev/null +++ b/client/test/setup.ts @@ -0,0 +1,20 @@ +import { exec } from "child_process"; +import * as config from "./config"; + +export const setup = () => { + console.log("Starting Client Tests"); + console.log({ + PORT: config.PORT, + HOST: config.HOST, + BASE_URL: config.BASE_URL, + }); +}; + +export const teardown = () => { + if (process.env.TEST_SHUTDOWN_API_SERVER) { + const pc = exec("pkill -SIGTERM api"); // Kill background API process + 
pc.stdout.on("data", (data) => { + console.log(`stdout: ${data}`); + }); + } +}; diff --git a/client/test/v1/login.test.ts b/client/test/v1/login.test.ts new file mode 100644 index 0000000..3492aa8 --- /dev/null +++ b/client/test/v1/login.test.ts @@ -0,0 +1,75 @@ +import { getClientV1 } from "../../client"; +import { describe, it, expect } from "vitest"; +import * as config from "../config"; +import axios, { AxiosError } from "axios"; + +const client = getClientV1(config.BASE_URL); + +describe("POST /api/v1/login", function () { + it("user can login", async function (done) { + try { + const res = await client.login("admin@admin.com", "admin"); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + expect(res.data.expiresAt).exist; + expect(res.data.token).exist; + + done(); + } catch (err) { + done(err); + } + }); +}); + +describe("POST /api/v1/users/logout", function () { + it("user can logout", async function (done) { + try { + const myclient = getClientV1(config.BASE_URL); + + const res = await myclient.login("admin@admin.com", "admin"); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + const res2 = await myclient.logout(); + expect(res2.status).toBe(204); + expect(res2.statusText).toBe("No Content"); + + // Try to get self again + try { + const res3 = await myclient.self(); + expect(res3.status).toBe(401); + expect(res3.statusText).toBe("Unauthorized"); + } catch (e) { + if (axios.isAxiosError(e)) { + expect(e.response.status).toBe(401); + done(); + } else { + done(e); + } + } + + done(); + } catch (err) { + done(err); + } + }); +}); + +describe("GET /api/v1/users/self", function () { + it("user can access basic self details", async function (done) { + try { + const res = await client.self(); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + expect(res.data.item.id).exist; + expect(res.data.item.name).toBe("Admin"); + expect(res.data.item.email).toBe("admin@admin.com"); + + done(); + } catch (err) 
{ + done(err); + } + }); +}); diff --git a/client/test/vitest.config.ts b/client/test/vitest.config.ts new file mode 100644 index 0000000..25f08e4 --- /dev/null +++ b/client/test/vitest.config.ts @@ -0,0 +1,8 @@ +/// +import { defineConfig } from "vite"; + +export default defineConfig({ + test: { + globalSetup: "./test/setup.ts", + }, +}); diff --git a/client/tsconfig.json b/client/tsconfig.json new file mode 100644 index 0000000..249dc6d --- /dev/null +++ b/client/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "commonjs", + "outDir": "build", + "sourceMap": true, + "allowJs": true, + "checkJs": false, + "resolveJsonModule": true, + "skipLibCheck": true, + "strict": false, + "esModuleInterop": true, + "removeComments": true + }, + "include": ["client/**/*", "test/**/*"], + "exclude": ["node_modules", "**/*.spec.ts"] +} From 43eba5437a31baa206dff4f6a6cff3f0c14a45a3 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Mon, 29 Aug 2022 18:34:27 -0800 Subject: [PATCH 002/530] drop cli and bump deps --- Taskfile.yml | 10 --- backend/Dockerfile | 2 - backend/app/cli/app.go | 9 --- backend/app/cli/app_users.go | 105 ------------------------------- backend/app/cli/main.go | 82 ------------------------ backend/app/cli/reader/reader.go | 65 ------------------- backend/go.mod | 45 +++++++------ backend/go.sum | 54 ++++++++++++---- 8 files changed, 64 insertions(+), 308 deletions(-) delete mode 100644 backend/app/cli/app.go delete mode 100644 backend/app/cli/app_users.go delete mode 100644 backend/app/cli/main.go delete mode 100644 backend/app/cli/reader/reader.go diff --git a/Taskfile.yml b/Taskfile.yml index ce64fff..b67fbe4 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -1,16 +1,6 @@ version: "3" tasks: - cli: - cmds: - - cd backend && go run ./app/cli/ {{.CLI_ARGS}} - silent: false - - cli:build: - cmds: - - cd backend && go build ./app/cli/ - silent: false - api: cmds: - cd backend/app/api/ && 
swag fmt diff --git a/backend/Dockerfile b/backend/Dockerfile index 3602d98..fd75d96 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -5,7 +5,6 @@ WORKDIR /go/src/app COPY . . RUN go get -d -v ./... RUN go build -o /go/bin/api -v ./app/api/*.go -RUN go build -o /go/bin/manage -v ./app/cli/*.go # Production Stage @@ -14,7 +13,6 @@ FROM alpine:latest RUN apk --no-cache add ca-certificates COPY ./config.template.yml /app/config.yml COPY --from=builder /go/bin/api /app -COPY --from=builder /go/bin/manage /bin RUN chmod +x /app/api RUN chmod +x /bin/manage diff --git a/backend/app/cli/app.go b/backend/app/cli/app.go deleted file mode 100644 index dd31ed9..0000000 --- a/backend/app/cli/app.go +++ /dev/null @@ -1,9 +0,0 @@ -package main - -import ( - "github.com/hay-kot/git-web-template/backend/internal/repo" -) - -type app struct { - repos *repo.AllRepos -} diff --git a/backend/app/cli/app_users.go b/backend/app/cli/app_users.go deleted file mode 100644 index c13ac29..0000000 --- a/backend/app/cli/app_users.go +++ /dev/null @@ -1,105 +0,0 @@ -package main - -import ( - "context" - "fmt" - "os" - "text/tabwriter" - - "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/app/cli/reader" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/hasher" - "github.com/urfave/cli/v2" -) - -func (a *app) UserCreate(c *cli.Context) error { - var defaultValidators = []reader.StringValidator{ - reader.StringRequired, - reader.StringNoLeadingOrTrailingWhitespace, - } - // Get Flags - name := reader.ReadString("Name: ", - defaultValidators..., - ) - password := reader.ReadString("Password: ", - defaultValidators..., - ) - - email := reader.ReadString("Email: ", - reader.StringRequired, - reader.StringNoLeadingOrTrailingWhitespace, - reader.StringContainsAt, - ) - isSuper := reader.ReadBool("Is Superuser?") - - pwHash, err := hasher.HashPassword(password) - if err != nil { - return err - } - - usr 
:= types.UserCreate{ - Name: name, - Email: email, - Password: pwHash, - IsSuperuser: isSuper, - } - - _, err = a.repos.Users.Create(context.Background(), usr) - - if err == nil { - fmt.Println("Super user created") - } - return err -} - -func (a *app) UserDelete(c *cli.Context) error { - // Get Flags - id := c.String("id") - uid := uuid.MustParse(id) - - fmt.Printf("Deleting user with id: %s\n", id) - - // Confirm Action - fmt.Printf("Are you sure you want to delete this user? (y/n) ") - var answer string - _, err := fmt.Scanln(&answer) - if answer != "y" || err != nil { - fmt.Println("Aborting") - return nil - } - - err = a.repos.Users.Delete(context.Background(), uid) - - if err == nil { - fmt.Printf("%v User(s) deleted (id=%v)\n", 1, id) - } - return err -} - -func (a *app) UserList(c *cli.Context) error { - fmt.Println("Superuser List") - - users, err := a.repos.Users.GetAll(context.Background()) - - if err != nil { - return err - } - - tabWriter := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - defer func(tabWriter *tabwriter.Writer) { - _ = tabWriter.Flush() - }(tabWriter) - - _, err = fmt.Fprintln(tabWriter, "Id\tName\tEmail\tIsSuper") - - if err != nil { - return err - } - - for _, u := range users { - _, _ = fmt.Fprintf(tabWriter, "%v\t%s\t%s\t%v\n", u.ID, u.Name, u.Email, u.IsSuperuser) - } - - return nil -} diff --git a/backend/app/cli/main.go b/backend/app/cli/main.go deleted file mode 100644 index d778b1f..0000000 --- a/backend/app/cli/main.go +++ /dev/null @@ -1,82 +0,0 @@ -package main - -import ( - "context" - "log" - "os" - - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/internal/config" - "github.com/hay-kot/git-web-template/backend/internal/repo" - _ "github.com/mattn/go-sqlite3" - - "github.com/urfave/cli/v2" -) - -func main() { - cfg, err := config.NewConfig("config.yml") - - if err != nil { - panic(err) - } - - if err := run(cfg); err != nil { - log.Fatal(err) - } -} - -func run(cfg 
*config.Config) error { - // ========================================================================= - // Initialize Database - c, err := ent.Open(cfg.Database.GetDriver(), cfg.Database.GetUrl()) - if err != nil { - log.Fatalf("failed opening connection to sqlite: %v", err) - } - defer func(c *ent.Client) { - _ = c.Close() - }(c) - if err := c.Schema.Create(context.Background()); err != nil { - log.Fatalf("failed creating schema resources: %v", err) - } - - // Create App - a := &app{ - repos: repo.EntAllRepos(c), - } - - app := &cli.App{ - Commands: []*cli.Command{ - { - Name: "users", - Aliases: []string{"u"}, - Usage: "options to manage users", - Subcommands: []*cli.Command{ - { - Name: "list", - Usage: "list users in database", - Action: a.UserList, - }, - { - Name: "add", - Usage: "add a new user", - Action: a.UserCreate, - }, - { - Name: "delete", - Usage: "delete user in database", - Action: a.UserDelete, - Flags: []cli.Flag{ - &cli.IntFlag{ - Name: "id", - Usage: "name of the user to add", - Required: true, - }, - }, - }, - }, - }, - }, - } - - return app.Run(os.Args) -} diff --git a/backend/app/cli/reader/reader.go b/backend/app/cli/reader/reader.go deleted file mode 100644 index a8cc92c..0000000 --- a/backend/app/cli/reader/reader.go +++ /dev/null @@ -1,65 +0,0 @@ -package reader - -import "fmt" - -type StringValidator func(s string) bool - -func StringRequired(s string) bool { - return s != "" -} - -func StringNoLeadingOrTrailingWhitespace(s string) bool { - return s != "" && len(s) > 0 && s[0] != ' ' && s[len(s)-1] != ' ' -} - -func StringContainsAt(s string) bool { - for _, c := range s { - if c == '@' { - return true - } - } - return false -} - -func ReadString(message string, sv ...StringValidator) string { - for { - fmt.Print(message) - var input string - fmt.Scanln(&input) - - if len(sv) == 0 { - return input - } - - isValid := true - for _, validator := range sv { - if !validator(input) { - isValid = false - fmt.Println("Invalid input") - 
continue - } - - } - - if isValid { - return input - } - - } -} - -func ReadBool(message string) bool { - for { - fmt.Print(message + " (y/n) ") - var input string - fmt.Scanln(&input) - - if input == "y" { - return true - } else if input == "n" { - return false - } else { - fmt.Println("Invalid input") - } - } -} diff --git a/backend/go.mod b/backend/go.mod index 70ff971..71654b3 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -3,47 +3,44 @@ module github.com/hay-kot/git-web-template/backend go 1.18 require ( - entgo.io/ent v0.10.0 + entgo.io/ent v0.11.2 github.com/ardanlabs/conf/v2 v2.2.0 github.com/go-chi/chi/v5 v5.0.7 github.com/google/uuid v1.3.0 - github.com/mattn/go-sqlite3 v1.14.10 - github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 - github.com/swaggo/http-swagger v1.3.0 - github.com/swaggo/swag v1.8.3 - github.com/tkrajina/typescriptify-golang-structs v0.1.7 - github.com/urfave/cli/v2 v2.3.0 - golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 + github.com/mattn/go-sqlite3 v1.14.15 + github.com/stretchr/testify v1.8.0 + github.com/swaggo/http-swagger v1.3.3 + github.com/swaggo/swag v1.8.5 + github.com/tkrajina/typescriptify-golang-structs v0.1.8 + golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 ) require ( - ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd // indirect + ariga.io/atlas v0.6.3 // indirect github.com/KyleBanks/depth v1.2.1 // indirect - github.com/agext/levenshtein v1.2.1 // indirect + github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect - github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/go-openapi/inflect v0.19.0 // indirect github.com/go-openapi/jsonpointer v0.19.5 // indirect github.com/go-openapi/jsonreference v0.20.0 // indirect - github.com/go-openapi/spec v0.20.6 // indirect - github.com/go-openapi/swag v0.21.1 // indirect - github.com/google/go-cmp v0.5.6 // indirect - 
github.com/hashicorp/hcl/v2 v2.10.0 // indirect + github.com/go-openapi/spec v0.20.7 // indirect + github.com/go-openapi/swag v0.22.3 // indirect + github.com/google/go-cmp v0.5.8 // indirect + github.com/hashicorp/hcl/v2 v2.13.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/kr/pretty v0.2.0 // indirect github.com/mailru/easyjson v0.7.7 // indirect - github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/russross/blackfriday/v2 v2.1.0 // indirect - github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe // indirect - github.com/tkrajina/go-reflector v0.5.5 // indirect - github.com/zclconf/go-cty v1.8.0 // indirect + github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a // indirect + github.com/tkrajina/go-reflector v0.5.6 // indirect + github.com/zclconf/go-cty v1.11.0 // indirect golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect - golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 // indirect - golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e // indirect + golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b // indirect + golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 // indirect golang.org/x/text v0.3.7 // indirect - golang.org/x/tools v0.1.11 // indirect + golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/backend/go.sum b/backend/go.sum index 118c2e5..980a7f8 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -1,22 +1,24 @@ ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd h1:YxnJl3ySvwQ3C7Rspa4CrQtwrftTZ0F8WJ36CvY7nWE= ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd/go.mod h1:XcLUpQX7Cq4qtagEHIleq3MJaBeeJ76BS8doc4gkOJk= +ariga.io/atlas v0.6.3 
h1:MtT4OxHqkW0XgYRjvqU4bmmv+42U1lvw9u8HzJ8yK9c= +ariga.io/atlas v0.6.3/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= entgo.io/ent v0.10.0 h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo= entgo.io/ent v0.10.0/go.mod h1:5bjIYdTizykmdtPY3knXrrGpxAh0cMjFfxdNnlNiUGU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +entgo.io/ent v0.11.2 h1:UM2/BUhF2FfsxPHRxLjQbhqJNaDdVlOwNIAMLs2jyto= +entgo.io/ent v0.11.2/go.mod h1:YGHEQnmmIUgtD5b1ICD5vg74dS3npkNnmC5K+0J+IHU= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/ardanlabs/conf/v2 v2.2.0 h1:ar1+TYIYAh2Tdeg2DQroh7ruR56/vJR8BDfzDIrXgtk= github.com/ardanlabs/conf/v2 v2.2.0/go.mod h1:m37ZKdW9jwMUEhGX36jRNt8VzSQ/HVmSziLZH2p33nY= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= -github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod 
h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -32,10 +34,14 @@ github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZ github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= github.com/go-openapi/spec v0.20.6 h1:ich1RQ3WDbfoeTqTAb+5EIxNmpKVJZWBNah9RAT0jIQ= github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/spec v0.20.7 h1:1Rlu/ZrOCCob0n+JKKJAWhNWMPW8bOZRg8FJaY+0SKI= +github.com/go-openapi/spec v0.20.7/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU= github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -44,10 +50,14 @@ github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaW github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= 
+github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/hashicorp/hcl/v2 v2.10.0 h1:1S1UnuhDGlv3gRFV4+0EdwB+znNP5HmcGbIqwnSCByg= github.com/hashicorp/hcl/v2 v2.10.0/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= +github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= +github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -66,49 +76,64 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0 github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-sqlite3 v1.14.10 h1:MLn+5bFRlWMGoSRmJour3CL1w/qL96mvipqpwQW/Sfk= github.com/mattn/go-sqlite3 v1.14.10/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= +github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e 
h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe 
h1:K8pHPVoTgxFJt1lXuIzzOX7zZhZFldJQK/CgKx9BFIc= github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a h1:kAe4YSu0O0UFn1DowNo2MY5p6xzqtJ/wQ7LZynSvGaY= +github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= github.com/swaggo/http-swagger v1.3.0 h1:1+6M4qRorIbdyTWTsGrwnb0r9jGK5dcWN82O6oY/yHQ= github.com/swaggo/http-swagger v1.3.0/go.mod h1:9glekdg40lwclrrKNRGgj/IMDxpNPZ3kzab4oPcF8EM= +github.com/swaggo/http-swagger v1.3.3 h1:Hu5Z0L9ssyBLofaama21iYaF2VbWyA8jdohaaCGpHsc= +github.com/swaggo/http-swagger v1.3.3/go.mod h1:sE+4PjD89IxMPm77FnkDz0sdO+p5lbXzrVWT6OTVVGo= github.com/swaggo/swag v1.8.3 h1:3pZSSCQ//gAH88lfmxM3Cd1+JCsxV8Md6f36b9hrZ5s= github.com/swaggo/swag v1.8.3/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg= +github.com/swaggo/swag v1.8.5 h1:7NgtfXsXE+jrcOwRyiftGKW7Ppydj7tZiVenuRf1fE4= +github.com/swaggo/swag v1.8.5/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg= github.com/tkrajina/go-reflector v0.5.5 h1:gwoQFNye30Kk7NrExj8zm3zFtrGPqOkzFMLuQZg1DtQ= github.com/tkrajina/go-reflector v0.5.5/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= +github.com/tkrajina/go-reflector v0.5.6 h1:hKQ0gyocG7vgMD2M3dRlYN6WBBOmdoOzJ6njQSepKdE= +github.com/tkrajina/go-reflector v0.5.6/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= github.com/tkrajina/typescriptify-golang-structs v0.1.7 h1:72jmiT/brlgtCPpwu4X0HkhMeUMtx8+xDiTMS93rFqY= github.com/tkrajina/typescriptify-golang-structs v0.1.7/go.mod h1:sjU00nti/PMEOZb07KljFlR+lJ+RotsC0GBQMv9EKls= -github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= -github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/tkrajina/typescriptify-golang-structs v0.1.8 h1:p7nZ9GP86w5Jh/sWamy9NP7BM03NrHqAAm7elHa/PdA= +github.com/tkrajina/typescriptify-golang-structs v0.1.8/go.mod 
h1:sjU00nti/PMEOZb07KljFlR+lJ+RotsC0GBQMv9EKls= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty v1.11.0 h1:726SxLdi2SDnjY+BStqB9J1hNp4+2WlzyXLuimibIe0= +github.com/zclconf/go-cty v1.11.0/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 h1:Y/gsMcFOcR+6S6f3YeMKl5g+dZMEWqcz5Czj/GWYbkM= +golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -118,6 +143,8 @@ golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net 
v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 h1:8NSylCMxLW4JvserAndSgFL7aPli6A68yf0bYFTcWCM= golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b h1:ZmngSVLe/wycRns9MKikG9OWIEjGcGAkacif7oYQaUY= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -126,6 +153,8 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e h1:CsOuNlbOuf0mzxJIefr6Q4uAUetRUwZE4qt7VfzP+xo= golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 h1:v6hYoSR9T5oet+pMXwUWkbiVqx/63mlHjefrHmxwfeY= +golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= @@ -136,6 +165,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.1.11 
h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY= golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= +golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa h1:uKcci2q7Qtp6nMTC/AAvfNUAldFtJuHWV9/5QWiypts= +golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= @@ -145,10 +176,11 @@ gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From 4c76f6b3674543a7821d4309b9167f209a4812eb Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Mon, 29 Aug 
2022 18:40:54 -0800 Subject: [PATCH 003/530] update imports --- .github/workflows/publish.yaml | 4 +- README.md | 10 +-- backend/app/api/app.go | 14 ++-- backend/app/api/base/base_ctrl.go | 6 +- backend/app/api/base/base_ctrl_test.go | 2 +- backend/app/api/main.go | 14 ++-- backend/app/api/middleware.go | 10 +-- backend/app/api/routes.go | 8 +- backend/app/api/seed.go | 8 +- backend/app/api/v1/controller.go | 4 +- backend/app/api/v1/main_test.go | 6 +- backend/app/api/v1/v1_ctrl_admin.go | 10 +-- backend/app/api/v1/v1_ctrl_admin_test.go | 8 +- backend/app/api/v1/v1_ctrl_auth.go | 8 +- backend/app/api/v1/v1_ctrl_user.go | 8 +- backend/app/generator/main.go | 6 +- backend/ent/authtokens.go | 4 +- backend/ent/authtokens/where.go | 2 +- backend/ent/authtokens_create.go | 4 +- backend/ent/authtokens_delete.go | 4 +- backend/ent/authtokens_query.go | 6 +- backend/ent/authtokens_update.go | 6 +- backend/ent/client.go | 6 +- backend/ent/ent.go | 4 +- backend/ent/enttest/enttest.go | 4 +- backend/ent/hook/hook.go | 2 +- backend/ent/mutation.go | 6 +- backend/ent/runtime.go | 6 +- backend/ent/runtime/runtime.go | 2 +- backend/ent/user.go | 2 +- backend/ent/user/where.go | 2 +- backend/ent/user_create.go | 4 +- backend/ent/user_delete.go | 4 +- backend/ent/user_query.go | 6 +- backend/ent/user_update.go | 6 +- backend/go.mod | 2 +- backend/go.sum | 77 +------------------ backend/internal/mapper/users_automapper.go | 4 +- backend/internal/mocks/factories/users.go | 4 +- backend/internal/mocks/mock_logger.go | 2 +- backend/internal/mocks/mocker_services.go | 4 +- backend/internal/mocks/mocks_ent_repo.go | 4 +- backend/internal/repo/main_test.go | 2 +- backend/internal/repo/repos_all.go | 2 +- backend/internal/repo/token_ent.go | 8 +- backend/internal/repo/token_ent_test.go | 4 +- backend/internal/repo/token_interface.go | 2 +- backend/internal/repo/users_ent.go | 6 +- backend/internal/repo/users_ent_test.go | 4 +- backend/internal/repo/users_interface.go | 2 +- 
backend/internal/services/all.go | 2 +- backend/internal/services/contexts.go | 2 +- backend/internal/services/contexts_test.go | 2 +- backend/internal/services/service_admin.go | 4 +- backend/internal/services/service_user.go | 6 +- .../server/response_error_builder_test.go | 2 +- 56 files changed, 139 insertions(+), 212 deletions(-) diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index d1f25e9..53499bf 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -18,6 +18,6 @@ jobs: env: CR_PAT: ${{ secrets.CR_PAT }} - name: Build Docker Image - run: cd backend && docker build -t ghcr.io/hay-kot/go-web-template:latest . + run: cd backend && docker build -t ghcr.io/hay-kot/content:latest . - name: push to container registry - run: docker push ghcr.io/hay-kot/go-web-template:latest + run: docker push ghcr.io/hay-kot/content:latest diff --git a/README.md b/README.md index 9042c20..9bad2ec 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@

Go Web Template

- - + + - - + +

- + This Go Web Template is a simple starter template for a Go web application. It includes a web server API, as well as a starter CLI to manage the web server/database inside the container. It should be noted that while while use of the standard library is a high priority, this template does make use of multiple external packages. It does however abide by the standard http handler pattern. - [Template Features](#template-features) diff --git a/backend/app/api/app.go b/backend/app/api/app.go index 5a062b0..8d087e4 100644 --- a/backend/app/api/app.go +++ b/backend/app/api/app.go @@ -3,13 +3,13 @@ package main import ( "time" - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/internal/config" - "github.com/hay-kot/git-web-template/backend/internal/repo" - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/mailer" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/internal/config" + "github.com/hay-kot/content/backend/internal/repo" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/mailer" + "github.com/hay-kot/content/backend/pkgs/server" ) type app struct { diff --git a/backend/app/api/base/base_ctrl.go b/backend/app/api/base/base_ctrl.go index 7649b8e..9f41d06 100644 --- a/backend/app/api/base/base_ctrl.go +++ b/backend/app/api/base/base_ctrl.go @@ -3,9 +3,9 @@ package base import ( "net/http" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/logger" + 
"github.com/hay-kot/content/backend/pkgs/server" ) type ReadyFunc func() bool diff --git a/backend/app/api/base/base_ctrl_test.go b/backend/app/api/base/base_ctrl_test.go index 972f55e..71dca76 100644 --- a/backend/app/api/base/base_ctrl_test.go +++ b/backend/app/api/base/base_ctrl_test.go @@ -5,7 +5,7 @@ import ( "net/http/httptest" "testing" - "github.com/hay-kot/git-web-template/backend/internal/mocks" + "github.com/hay-kot/content/backend/internal/mocks" ) func GetTestHandler(t *testing.T) *BaseController { diff --git a/backend/app/api/main.go b/backend/app/api/main.go index 12570bd..cfd59fb 100644 --- a/backend/app/api/main.go +++ b/backend/app/api/main.go @@ -7,13 +7,13 @@ import ( "os" "time" - "github.com/hay-kot/git-web-template/backend/app/api/docs" - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/internal/config" - "github.com/hay-kot/git-web-template/backend/internal/repo" - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/app/api/docs" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/internal/config" + "github.com/hay-kot/content/backend/internal/repo" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/server" _ "github.com/mattn/go-sqlite3" ) diff --git a/backend/app/api/middleware.go b/backend/app/api/middleware.go index 1a0d813..3e2da68 100644 --- a/backend/app/api/middleware.go +++ b/backend/app/api/middleware.go @@ -8,11 +8,11 @@ import ( "github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5/middleware" - "github.com/hay-kot/git-web-template/backend/internal/config" - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/pkgs/hasher" - 
"github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/internal/config" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/pkgs/hasher" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/server" ) func (a *app) setGlobalMiddleware(r *chi.Mux) { diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go index e8887a5..b784c26 100644 --- a/backend/app/api/routes.go +++ b/backend/app/api/routes.go @@ -5,10 +5,10 @@ import ( "net/http" "github.com/go-chi/chi/v5" - "github.com/hay-kot/git-web-template/backend/app/api/base" - _ "github.com/hay-kot/git-web-template/backend/app/api/docs" - v1 "github.com/hay-kot/git-web-template/backend/app/api/v1" - "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/content/backend/app/api/base" + _ "github.com/hay-kot/content/backend/app/api/docs" + v1 "github.com/hay-kot/content/backend/app/api/v1" + "github.com/hay-kot/content/backend/internal/repo" httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware ) diff --git a/backend/app/api/seed.go b/backend/app/api/seed.go index 3fbea74..7d66774 100644 --- a/backend/app/api/seed.go +++ b/backend/app/api/seed.go @@ -4,10 +4,10 @@ import ( "context" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/internal/repo" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/hasher" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/content/backend/internal/repo" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/hasher" + "github.com/hay-kot/content/backend/pkgs/logger" ) const ( diff --git a/backend/app/api/v1/controller.go b/backend/app/api/v1/controller.go index 2d13045..b72cde7 100644 --- 
a/backend/app/api/v1/controller.go +++ b/backend/app/api/v1/controller.go @@ -1,8 +1,8 @@ package v1 import ( - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/pkgs/logger" ) type V1Controller struct { diff --git a/backend/app/api/v1/main_test.go b/backend/app/api/v1/main_test.go index c9a1276..3713dee 100644 --- a/backend/app/api/v1/main_test.go +++ b/backend/app/api/v1/main_test.go @@ -4,9 +4,9 @@ import ( "context" "testing" - "github.com/hay-kot/git-web-template/backend/internal/mocks" - "github.com/hay-kot/git-web-template/backend/internal/mocks/factories" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/internal/mocks" + "github.com/hay-kot/content/backend/internal/mocks/factories" + "github.com/hay-kot/content/backend/internal/types" ) var mockHandler = &V1Controller{} diff --git a/backend/app/api/v1/v1_ctrl_admin.go b/backend/app/api/v1/v1_ctrl_admin.go index 81afd43..4961c6b 100644 --- a/backend/app/api/v1/v1_ctrl_admin.go +++ b/backend/app/api/v1/v1_ctrl_admin.go @@ -6,11 +6,11 @@ import ( "github.com/go-chi/chi/v5" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/hasher" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/hasher" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/server" ) // HandleAdminUserGetAll godoc diff --git a/backend/app/api/v1/v1_ctrl_admin_test.go b/backend/app/api/v1/v1_ctrl_admin_test.go index 
c0066c7..6c0d8e9 100644 --- a/backend/app/api/v1/v1_ctrl_admin_test.go +++ b/backend/app/api/v1/v1_ctrl_admin_test.go @@ -9,10 +9,10 @@ import ( "net/http/httptest" "testing" - "github.com/hay-kot/git-web-template/backend/internal/mocks/chimocker" - "github.com/hay-kot/git-web-template/backend/internal/mocks/factories" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/internal/mocks/chimocker" + "github.com/hay-kot/content/backend/internal/mocks/factories" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/server" "github.com/stretchr/testify/assert" ) diff --git a/backend/app/api/v1/v1_ctrl_auth.go b/backend/app/api/v1/v1_ctrl_auth.go index f204e1e..c09e3f4 100644 --- a/backend/app/api/v1/v1_ctrl_auth.go +++ b/backend/app/api/v1/v1_ctrl_auth.go @@ -4,10 +4,10 @@ import ( "errors" "net/http" - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/server" ) var ( diff --git a/backend/app/api/v1/v1_ctrl_user.go b/backend/app/api/v1/v1_ctrl_user.go index aed64b6..68c6be2 100644 --- a/backend/app/api/v1/v1_ctrl_user.go +++ b/backend/app/api/v1/v1_ctrl_user.go @@ -4,10 +4,10 @@ import ( "errors" "net/http" - "github.com/hay-kot/git-web-template/backend/internal/services" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" - "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/hay-kot/content/backend/internal/services" + 
"github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/server" ) // HandleUserSelf godoc diff --git a/backend/app/generator/main.go b/backend/app/generator/main.go index d1d6efb..3cc45a6 100644 --- a/backend/app/generator/main.go +++ b/backend/app/generator/main.go @@ -4,9 +4,9 @@ import ( "time" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/automapper" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/automapper" "github.com/tkrajina/typescriptify-golang-structs/typescriptify" ) diff --git a/backend/ent/authtokens.go b/backend/ent/authtokens.go index ecf611c..d318a43 100644 --- a/backend/ent/authtokens.go +++ b/backend/ent/authtokens.go @@ -9,8 +9,8 @@ import ( "entgo.io/ent/dialect/sql" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/user" ) // AuthTokens is the model entity for the AuthTokens schema. diff --git a/backend/ent/authtokens/where.go b/backend/ent/authtokens/where.go index 5fda3f0..c38121f 100644 --- a/backend/ent/authtokens/where.go +++ b/backend/ent/authtokens/where.go @@ -7,7 +7,7 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" - "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/predicate" ) // ID filters vertices based on their ID field. 
diff --git a/backend/ent/authtokens_create.go b/backend/ent/authtokens_create.go index 4cca125..aa64a3a 100644 --- a/backend/ent/authtokens_create.go +++ b/backend/ent/authtokens_create.go @@ -11,8 +11,8 @@ import ( "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/user" ) // AuthTokensCreate is the builder for creating a AuthTokens entity. diff --git a/backend/ent/authtokens_delete.go b/backend/ent/authtokens_delete.go index 123ee17..9cc1ee4 100644 --- a/backend/ent/authtokens_delete.go +++ b/backend/ent/authtokens_delete.go @@ -9,8 +9,8 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/predicate" ) // AuthTokensDelete is the builder for deleting a AuthTokens entity. diff --git a/backend/ent/authtokens_query.go b/backend/ent/authtokens_query.go index 9e309c8..7c3041b 100644 --- a/backend/ent/authtokens_query.go +++ b/backend/ent/authtokens_query.go @@ -12,9 +12,9 @@ import ( "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/predicate" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" ) // AuthTokensQuery is the builder for querying AuthTokens entities. 
diff --git a/backend/ent/authtokens_update.go b/backend/ent/authtokens_update.go index 243db3f..f5a99c7 100644 --- a/backend/ent/authtokens_update.go +++ b/backend/ent/authtokens_update.go @@ -12,9 +12,9 @@ import ( "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/predicate" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" ) // AuthTokensUpdate is the builder for updating AuthTokens entities. diff --git a/backend/ent/client.go b/backend/ent/client.go index c6cf533..2b31566 100644 --- a/backend/ent/client.go +++ b/backend/ent/client.go @@ -8,10 +8,10 @@ import ( "log" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/migrate" + "github.com/hay-kot/content/backend/ent/migrate" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/user" "entgo.io/ent/dialect" "entgo.io/ent/dialect/sql" diff --git a/backend/ent/ent.go b/backend/ent/ent.go index 2568a4e..9e5990e 100644 --- a/backend/ent/ent.go +++ b/backend/ent/ent.go @@ -8,8 +8,8 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/user" ) // ent aliases to avoid import conflicts in user's code. 
diff --git a/backend/ent/enttest/enttest.go b/backend/ent/enttest/enttest.go index cc6930e..4c398dc 100644 --- a/backend/ent/enttest/enttest.go +++ b/backend/ent/enttest/enttest.go @@ -5,9 +5,9 @@ package enttest import ( "context" - "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/content/backend/ent" // required by schema hooks. - _ "github.com/hay-kot/git-web-template/backend/ent/runtime" + _ "github.com/hay-kot/content/backend/ent/runtime" "entgo.io/ent/dialect/sql/schema" ) diff --git a/backend/ent/hook/hook.go b/backend/ent/hook/hook.go index 1eefec3..362c3cf 100644 --- a/backend/ent/hook/hook.go +++ b/backend/ent/hook/hook.go @@ -6,7 +6,7 @@ import ( "context" "fmt" - "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/content/backend/ent" ) // The AuthTokensFunc type is an adapter to allow the use of ordinary diff --git a/backend/ent/mutation.go b/backend/ent/mutation.go index 3705cb4..851990d 100644 --- a/backend/ent/mutation.go +++ b/backend/ent/mutation.go @@ -10,9 +10,9 @@ import ( "time" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/predicate" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" "entgo.io/ent" ) diff --git a/backend/ent/runtime.go b/backend/ent/runtime.go index 828477f..9df6038 100644 --- a/backend/ent/runtime.go +++ b/backend/ent/runtime.go @@ -6,9 +6,9 @@ import ( "time" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/schema" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/schema" + "github.com/hay-kot/content/backend/ent/user" ) // The init function 
reads all schema descriptors with runtime code diff --git a/backend/ent/runtime/runtime.go b/backend/ent/runtime/runtime.go index 31da890..3fc430e 100644 --- a/backend/ent/runtime/runtime.go +++ b/backend/ent/runtime/runtime.go @@ -2,7 +2,7 @@ package runtime -// The schema-stitching logic is generated in github.com/hay-kot/git-web-template/backend/ent/runtime.go +// The schema-stitching logic is generated in github.com/hay-kot/content/backend/ent/runtime.go const ( Version = "v0.10.0" // Version of ent codegen. diff --git a/backend/ent/user.go b/backend/ent/user.go index 62eaf8f..bb6f0b3 100644 --- a/backend/ent/user.go +++ b/backend/ent/user.go @@ -8,7 +8,7 @@ import ( "entgo.io/ent/dialect/sql" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/user" ) // User is the model entity for the User schema. diff --git a/backend/ent/user/where.go b/backend/ent/user/where.go index 36db52a..e17ff3a 100644 --- a/backend/ent/user/where.go +++ b/backend/ent/user/where.go @@ -6,7 +6,7 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/predicate" ) // ID filters vertices based on their ID field. diff --git a/backend/ent/user_create.go b/backend/ent/user_create.go index 95ad932..696290d 100644 --- a/backend/ent/user_create.go +++ b/backend/ent/user_create.go @@ -10,8 +10,8 @@ import ( "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/user" ) // UserCreate is the builder for creating a User entity. 
diff --git a/backend/ent/user_delete.go b/backend/ent/user_delete.go index 6c5aafc..96d5d52 100644 --- a/backend/ent/user_delete.go +++ b/backend/ent/user_delete.go @@ -9,8 +9,8 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" - "github.com/hay-kot/git-web-template/backend/ent/predicate" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" ) // UserDelete is the builder for deleting a User entity. diff --git a/backend/ent/user_query.go b/backend/ent/user_query.go index 804688d..a40d95d 100644 --- a/backend/ent/user_query.go +++ b/backend/ent/user_query.go @@ -13,9 +13,9 @@ import ( "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/predicate" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" ) // UserQuery is the builder for querying User entities. diff --git a/backend/ent/user_update.go b/backend/ent/user_update.go index d532fc5..9a6a3cb 100644 --- a/backend/ent/user_update.go +++ b/backend/ent/user_update.go @@ -10,9 +10,9 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/ent/predicate" - "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" ) // UserUpdate is the builder for updating User entities. 
diff --git a/backend/go.mod b/backend/go.mod index 71654b3..7549d09 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -1,4 +1,4 @@ -module github.com/hay-kot/git-web-template/backend +module github.com/hay-kot/content/backend go 1.18 diff --git a/backend/go.sum b/backend/go.sum index 980a7f8..cc23f5f 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -1,20 +1,12 @@ -ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd h1:YxnJl3ySvwQ3C7Rspa4CrQtwrftTZ0F8WJ36CvY7nWE= -ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd/go.mod h1:XcLUpQX7Cq4qtagEHIleq3MJaBeeJ76BS8doc4gkOJk= ariga.io/atlas v0.6.3 h1:MtT4OxHqkW0XgYRjvqU4bmmv+42U1lvw9u8HzJ8yK9c= ariga.io/atlas v0.6.3/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE= -entgo.io/ent v0.10.0 h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo= -entgo.io/ent v0.10.0/go.mod h1:5bjIYdTizykmdtPY3knXrrGpxAh0cMjFfxdNnlNiUGU= entgo.io/ent v0.11.2 h1:UM2/BUhF2FfsxPHRxLjQbhqJNaDdVlOwNIAMLs2jyto= entgo.io/ent v0.11.2/go.mod h1:YGHEQnmmIUgtD5b1ICD5vg74dS3npkNnmC5K+0J+IHU= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= -github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= -github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= -github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= -github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 
v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/ardanlabs/conf/v2 v2.2.0 h1:ar1+TYIYAh2Tdeg2DQroh7ruR56/vJR8BDfzDIrXgtk= @@ -32,30 +24,18 @@ github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUe github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= -github.com/go-openapi/spec v0.20.6 h1:ich1RQ3WDbfoeTqTAb+5EIxNmpKVJZWBNah9RAT0jIQ= -github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= github.com/go-openapi/spec v0.20.7 h1:1Rlu/ZrOCCob0n+JKKJAWhNWMPW8bOZRg8FJaY+0SKI= github.com/go-openapi/spec v0.20.7/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU= -github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= -github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.5.6 
h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/hashicorp/hcl/v2 v2.10.0 h1:1S1UnuhDGlv3gRFV4+0EdwB+znNP5HmcGbIqwnSCByg= -github.com/hashicorp/hcl/v2 v2.10.0/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -68,119 +48,66 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= -github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mattn/go-sqlite3 v1.14.10 h1:MLn+5bFRlWMGoSRmJour3CL1w/qL96mvipqpwQW/Sfk= 
-github.com/mattn/go-sqlite3 v1.14.10/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= -github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= -github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 
h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU= -github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe h1:K8pHPVoTgxFJt1lXuIzzOX7zZhZFldJQK/CgKx9BFIc= -github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a h1:kAe4YSu0O0UFn1DowNo2MY5p6xzqtJ/wQ7LZynSvGaY= github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= -github.com/swaggo/http-swagger v1.3.0 h1:1+6M4qRorIbdyTWTsGrwnb0r9jGK5dcWN82O6oY/yHQ= -github.com/swaggo/http-swagger v1.3.0/go.mod h1:9glekdg40lwclrrKNRGgj/IMDxpNPZ3kzab4oPcF8EM= github.com/swaggo/http-swagger v1.3.3 h1:Hu5Z0L9ssyBLofaama21iYaF2VbWyA8jdohaaCGpHsc= github.com/swaggo/http-swagger v1.3.3/go.mod h1:sE+4PjD89IxMPm77FnkDz0sdO+p5lbXzrVWT6OTVVGo= -github.com/swaggo/swag v1.8.3 h1:3pZSSCQ//gAH88lfmxM3Cd1+JCsxV8Md6f36b9hrZ5s= -github.com/swaggo/swag v1.8.3/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg= github.com/swaggo/swag v1.8.5 h1:7NgtfXsXE+jrcOwRyiftGKW7Ppydj7tZiVenuRf1fE4= github.com/swaggo/swag v1.8.5/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg= -github.com/tkrajina/go-reflector v0.5.5 h1:gwoQFNye30Kk7NrExj8zm3zFtrGPqOkzFMLuQZg1DtQ= github.com/tkrajina/go-reflector v0.5.5/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= github.com/tkrajina/go-reflector v0.5.6 h1:hKQ0gyocG7vgMD2M3dRlYN6WBBOmdoOzJ6njQSepKdE= github.com/tkrajina/go-reflector v0.5.6/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= -github.com/tkrajina/typescriptify-golang-structs v0.1.7 
h1:72jmiT/brlgtCPpwu4X0HkhMeUMtx8+xDiTMS93rFqY= -github.com/tkrajina/typescriptify-golang-structs v0.1.7/go.mod h1:sjU00nti/PMEOZb07KljFlR+lJ+RotsC0GBQMv9EKls= github.com/tkrajina/typescriptify-golang-structs v0.1.8 h1:p7nZ9GP86w5Jh/sWamy9NP7BM03NrHqAAm7elHa/PdA= github.com/tkrajina/typescriptify-golang-structs v0.1.8/go.mod h1:sjU00nti/PMEOZb07KljFlR+lJ+RotsC0GBQMv9EKls= -github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= -github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= -github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= -github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= -github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= -github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty v1.11.0 h1:726SxLdi2SDnjY+BStqB9J1hNp4+2WlzyXLuimibIe0= github.com/zclconf/go-cty v1.11.0/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA= -github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 h1:Y/gsMcFOcR+6S6f3YeMKl5g+dZMEWqcz5Czj/GWYbkM= golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s= 
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 h1:8NSylCMxLW4JvserAndSgFL7aPli6A68yf0bYFTcWCM= -golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b h1:ZmngSVLe/wycRns9MKikG9OWIEjGcGAkacif7oYQaUY= golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e h1:CsOuNlbOuf0mzxJIefr6Q4uAUetRUwZE4qt7VfzP+xo= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 
h1:v6hYoSR9T5oet+pMXwUWkbiVqx/63mlHjefrHmxwfeY= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.1.11 h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY= -golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa h1:uKcci2q7Qtp6nMTC/AAvfNUAldFtJuHWV9/5QWiypts= golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 
v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/backend/internal/mapper/users_automapper.go b/backend/internal/mapper/users_automapper.go index 4f97e09..78392f1 100644 --- a/backend/internal/mapper/users_automapper.go +++ b/backend/internal/mapper/users_automapper.go @@ -2,8 +2,8 @@ package mapper import ( - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/internal/types" ) func UserOutFromModel(from ent.User) types.UserOut { diff --git a/backend/internal/mocks/factories/users.go b/backend/internal/mocks/factories/users.go index 1642a7a..438b2d8 100644 --- a/backend/internal/mocks/factories/users.go +++ b/backend/internal/mocks/factories/users.go @@ -1,8 +1,8 @@ package factories import ( - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/hay-kot/content/backend/internal/types" + 
"github.com/hay-kot/content/backend/pkgs/faker" ) func UserFactory() types.UserCreate { diff --git a/backend/internal/mocks/mock_logger.go b/backend/internal/mocks/mock_logger.go index d367161..c3f0a1f 100644 --- a/backend/internal/mocks/mock_logger.go +++ b/backend/internal/mocks/mock_logger.go @@ -3,7 +3,7 @@ package mocks import ( "os" - "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/logger" ) func GetStructLogger() *logger.Logger { diff --git a/backend/internal/mocks/mocker_services.go b/backend/internal/mocks/mocker_services.go index 3011fb1..e44b464 100644 --- a/backend/internal/mocks/mocker_services.go +++ b/backend/internal/mocks/mocker_services.go @@ -1,8 +1,8 @@ package mocks import ( - "github.com/hay-kot/git-web-template/backend/internal/repo" - "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/content/backend/internal/repo" + "github.com/hay-kot/content/backend/internal/services" ) func GetMockServices(repos *repo.AllRepos) *services.AllServices { diff --git a/backend/internal/mocks/mocks_ent_repo.go b/backend/internal/mocks/mocks_ent_repo.go index 9273502..e0b7e81 100644 --- a/backend/internal/mocks/mocks_ent_repo.go +++ b/backend/internal/mocks/mocks_ent_repo.go @@ -3,8 +3,8 @@ package mocks import ( "context" - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/internal/repo" _ "github.com/mattn/go-sqlite3" ) diff --git a/backend/internal/repo/main_test.go b/backend/internal/repo/main_test.go index f516bc5..a28d0ef 100644 --- a/backend/internal/repo/main_test.go +++ b/backend/internal/repo/main_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/content/backend/ent" _ "github.com/mattn/go-sqlite3" ) diff --git a/backend/internal/repo/repos_all.go 
b/backend/internal/repo/repos_all.go index faf50ee..7703aa3 100644 --- a/backend/internal/repo/repos_all.go +++ b/backend/internal/repo/repos_all.go @@ -1,6 +1,6 @@ package repo -import "github.com/hay-kot/git-web-template/backend/ent" +import "github.com/hay-kot/content/backend/ent" // AllRepos is a container for all the repository interfaces type AllRepos struct { diff --git a/backend/internal/repo/token_ent.go b/backend/internal/repo/token_ent.go index f96ed7c..7f3807d 100644 --- a/backend/internal/repo/token_ent.go +++ b/backend/internal/repo/token_ent.go @@ -4,10 +4,10 @@ import ( "context" "time" - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/ent/authtokens" - "github.com/hay-kot/git-web-template/backend/internal/mapper" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/internal/mapper" + "github.com/hay-kot/content/backend/internal/types" ) type EntTokenRepository struct { diff --git a/backend/internal/repo/token_ent_test.go b/backend/internal/repo/token_ent_test.go index ae01a5a..88584ef 100644 --- a/backend/internal/repo/token_ent_test.go +++ b/backend/internal/repo/token_ent_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/hasher" "github.com/stretchr/testify/assert" ) diff --git a/backend/internal/repo/token_interface.go b/backend/internal/repo/token_interface.go index 4396063..f610d9d 100644 --- a/backend/internal/repo/token_interface.go +++ b/backend/internal/repo/token_interface.go @@ -3,7 +3,7 @@ package repo import ( "context" - "github.com/hay-kot/git-web-template/backend/internal/types" + 
"github.com/hay-kot/content/backend/internal/types" ) type TokenRepository interface { diff --git a/backend/internal/repo/users_ent.go b/backend/internal/repo/users_ent.go index 0131ea7..632c0fb 100644 --- a/backend/internal/repo/users_ent.go +++ b/backend/internal/repo/users_ent.go @@ -4,9 +4,9 @@ import ( "context" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/ent" - "github.com/hay-kot/git-web-template/backend/ent/user" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/ent/user" + "github.com/hay-kot/content/backend/internal/types" ) type EntUserRepository struct { diff --git a/backend/internal/repo/users_ent_test.go b/backend/internal/repo/users_ent_test.go index 01a228e..2bf9687 100644 --- a/backend/internal/repo/users_ent_test.go +++ b/backend/internal/repo/users_ent_test.go @@ -5,8 +5,8 @@ import ( "fmt" "testing" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/faker" "github.com/stretchr/testify/assert" ) diff --git a/backend/internal/repo/users_interface.go b/backend/internal/repo/users_interface.go index 161850d..2e594ba 100644 --- a/backend/internal/repo/users_interface.go +++ b/backend/internal/repo/users_interface.go @@ -4,7 +4,7 @@ import ( "context" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/internal/types" ) type UserRepository interface { diff --git a/backend/internal/services/all.go b/backend/internal/services/all.go index 3d4273d..39bc690 100644 --- a/backend/internal/services/all.go +++ b/backend/internal/services/all.go @@ -1,6 +1,6 @@ package services -import "github.com/hay-kot/git-web-template/backend/internal/repo" +import "github.com/hay-kot/content/backend/internal/repo" 
type AllServices struct { User *UserService diff --git a/backend/internal/services/contexts.go b/backend/internal/services/contexts.go index d6a0968..bc18fcc 100644 --- a/backend/internal/services/contexts.go +++ b/backend/internal/services/contexts.go @@ -3,7 +3,7 @@ package services import ( "context" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/internal/types" ) type contextKeys struct { diff --git a/backend/internal/services/contexts_test.go b/backend/internal/services/contexts_test.go index 9cae289..cf5a862 100644 --- a/backend/internal/services/contexts_test.go +++ b/backend/internal/services/contexts_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/internal/types" "github.com/stretchr/testify/assert" ) diff --git a/backend/internal/services/service_admin.go b/backend/internal/services/service_admin.go index 3d60c18..6fda029 100644 --- a/backend/internal/services/service_admin.go +++ b/backend/internal/services/service_admin.go @@ -4,8 +4,8 @@ import ( "context" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/internal/repo" - "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/content/backend/internal/repo" + "github.com/hay-kot/content/backend/internal/types" ) type AdminService struct { diff --git a/backend/internal/services/service_user.go b/backend/internal/services/service_user.go index 4dfbc74..0bb8d5f 100644 --- a/backend/internal/services/service_user.go +++ b/backend/internal/services/service_user.go @@ -6,9 +6,9 @@ import ( "time" "github.com/google/uuid" - "github.com/hay-kot/git-web-template/backend/internal/repo" - "github.com/hay-kot/git-web-template/backend/internal/types" - "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/content/backend/internal/repo" + 
"github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/hasher" ) var ( diff --git a/backend/pkgs/server/response_error_builder_test.go b/backend/pkgs/server/response_error_builder_test.go index 012e744..b556a18 100644 --- a/backend/pkgs/server/response_error_builder_test.go +++ b/backend/pkgs/server/response_error_builder_test.go @@ -7,7 +7,7 @@ import ( "net/http/httptest" "testing" - "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/hay-kot/content/backend/pkgs/faker" "github.com/stretchr/testify/assert" ) From 63cfeffc4d18ec76a26dbe538511db9d190664ad Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 10:04:50 -0800 Subject: [PATCH 004/530] generate database schemas --- backend/ent/authtokens.go | 66 +- backend/ent/authtokens/authtokens.go | 23 +- backend/ent/authtokens/where.go | 272 +- backend/ent/authtokens_create.go | 122 +- backend/ent/authtokens_delete.go | 10 +- backend/ent/authtokens_query.go | 535 +- backend/ent/authtokens_update.go | 104 +- backend/ent/client.go | 737 ++- backend/ent/config.go | 7 +- backend/ent/context.go | 2 +- backend/ent/ent.go | 220 +- backend/ent/enttest/enttest.go | 20 +- backend/ent/group.go | 210 + backend/ent/group/group.go | 120 + backend/ent/group/where.go | 511 ++ backend/ent/group_create.go | 494 ++ backend/ent/group_delete.go | 115 + backend/ent/group_query.go | 832 +++ backend/ent/group_update.go | 1125 ++++ backend/ent/hook/hook.go | 71 +- backend/ent/item.go | 385 ++ backend/ent/item/item.go | 165 + backend/ent/item/where.go | 1917 ++++++ backend/ent/item_create.go | 817 +++ backend/ent/item_delete.go | 115 + backend/ent/item_query.go | 861 +++ backend/ent/item_update.go | 1935 ++++++ backend/ent/itemfield.go | 236 + backend/ent/itemfield/itemfield.go | 127 + backend/ent/itemfield/where.go | 844 +++ backend/ent/itemfield_create.go | 516 ++ backend/ent/itemfield_delete.go | 115 + 
backend/ent/itemfield_query.go | 611 ++ backend/ent/itemfield_update.go | 836 +++ backend/ent/label.go | 204 + backend/ent/label/label.go | 98 + backend/ent/label/where.go | 659 ++ backend/ent/label_create.go | 444 ++ backend/ent/label_delete.go | 115 + backend/ent/label_query.go | 714 +++ backend/ent/label_update.go | 793 +++ backend/ent/location.go | 193 + backend/ent/location/location.go | 89 + backend/ent/location/where.go | 539 ++ backend/ent/location_create.go | 417 ++ backend/ent/location_delete.go | 115 + backend/ent/location_query.go | 687 +++ backend/ent/location_update.go | 717 +++ backend/ent/migrate/migrate.go | 27 +- backend/ent/migrate/schema.go | 199 +- backend/ent/mutation.go | 5273 ++++++++++++++++- backend/ent/predicate/predicate.go | 17 +- backend/ent/runtime.go | 352 +- backend/ent/runtime/runtime.go | 6 +- .../schema/{authtokens.go => auth_tokens.go} | 9 +- backend/ent/schema/group.go | 41 + backend/ent/schema/item.go | 80 + backend/ent/schema/item_field.go | 48 + backend/ent/schema/label.go | 40 + backend/ent/schema/location.go | 35 + backend/ent/schema/mixins/base.go | 42 + backend/ent/schema/user.go | 17 +- backend/ent/tx.go | 17 +- backend/ent/user.go | 81 +- backend/ent/user/user.go | 36 +- backend/ent/user/where.go | 222 +- backend/ent/user_create.go | 114 +- backend/ent/user_delete.go | 8 +- backend/ent/user_query.go | 608 +- backend/ent/user_update.go | 199 +- 70 files changed, 26933 insertions(+), 1398 deletions(-) create mode 100644 backend/ent/group.go create mode 100644 backend/ent/group/group.go create mode 100644 backend/ent/group/where.go create mode 100644 backend/ent/group_create.go create mode 100644 backend/ent/group_delete.go create mode 100644 backend/ent/group_query.go create mode 100644 backend/ent/group_update.go create mode 100644 backend/ent/item.go create mode 100644 backend/ent/item/item.go create mode 100644 backend/ent/item/where.go create mode 100644 backend/ent/item_create.go create mode 100644 
backend/ent/item_delete.go create mode 100644 backend/ent/item_query.go create mode 100644 backend/ent/item_update.go create mode 100644 backend/ent/itemfield.go create mode 100644 backend/ent/itemfield/itemfield.go create mode 100644 backend/ent/itemfield/where.go create mode 100644 backend/ent/itemfield_create.go create mode 100644 backend/ent/itemfield_delete.go create mode 100644 backend/ent/itemfield_query.go create mode 100644 backend/ent/itemfield_update.go create mode 100644 backend/ent/label.go create mode 100644 backend/ent/label/label.go create mode 100644 backend/ent/label/where.go create mode 100644 backend/ent/label_create.go create mode 100644 backend/ent/label_delete.go create mode 100644 backend/ent/label_query.go create mode 100644 backend/ent/label_update.go create mode 100644 backend/ent/location.go create mode 100644 backend/ent/location/location.go create mode 100644 backend/ent/location/where.go create mode 100644 backend/ent/location_create.go create mode 100644 backend/ent/location_delete.go create mode 100644 backend/ent/location_query.go create mode 100644 backend/ent/location_update.go rename backend/ent/schema/{authtokens.go => auth_tokens.go} (84%) create mode 100644 backend/ent/schema/group.go create mode 100644 backend/ent/schema/item.go create mode 100644 backend/ent/schema/item_field.go create mode 100644 backend/ent/schema/label.go create mode 100644 backend/ent/schema/location.go create mode 100644 backend/ent/schema/mixins/base.go diff --git a/backend/ent/authtokens.go b/backend/ent/authtokens.go index d318a43..94784d8 100644 --- a/backend/ent/authtokens.go +++ b/backend/ent/authtokens.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -17,13 +17,15 @@ import ( type AuthTokens struct { config `json:"-"` // ID of the ent. - ID int `json:"id,omitempty"` + ID uuid.UUID `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. 
+ CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` // Token holds the value of the "token" field. Token []byte `json:"token,omitempty"` // ExpiresAt holds the value of the "expires_at" field. ExpiresAt time.Time `json:"expires_at,omitempty"` - // CreatedAt holds the value of the "created_at" field. - CreatedAt time.Time `json:"created_at,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the AuthTokensQuery when eager-loading is set. Edges AuthTokensEdges `json:"edges"` @@ -44,8 +46,7 @@ type AuthTokensEdges struct { func (e AuthTokensEdges) UserOrErr() (*User, error) { if e.loadedTypes[0] { if e.User == nil { - // The edge user was loaded in eager-loading, - // but was not found. + // Edge was loaded but was not found. return nil, &NotFoundError{label: user.Label} } return e.User, nil @@ -60,10 +61,10 @@ func (*AuthTokens) scanValues(columns []string) ([]interface{}, error) { switch columns[i] { case authtokens.FieldToken: values[i] = new([]byte) - case authtokens.FieldID: - values[i] = new(sql.NullInt64) - case authtokens.FieldExpiresAt, authtokens.FieldCreatedAt: + case authtokens.FieldCreatedAt, authtokens.FieldUpdatedAt, authtokens.FieldExpiresAt: values[i] = new(sql.NullTime) + case authtokens.FieldID: + values[i] = new(uuid.UUID) case authtokens.ForeignKeys[0]: // user_auth_tokens values[i] = &sql.NullScanner{S: new(uuid.UUID)} default: @@ -82,11 +83,23 @@ func (at *AuthTokens) assignValues(columns []string, values []interface{}) error for i := range columns { switch columns[i] { case authtokens.FieldID: - value, ok := values[i].(*sql.NullInt64) - if !ok { - return fmt.Errorf("unexpected type %T for field id", value) + if value, ok := values[i].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field id", values[i]) + } else if value != nil { + at.ID = *value + } + case 
authtokens.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + at.CreatedAt = value.Time + } + case authtokens.FieldUpdatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[i]) + } else if value.Valid { + at.UpdatedAt = value.Time } - at.ID = int(value.Int64) case authtokens.FieldToken: if value, ok := values[i].(*[]byte); !ok { return fmt.Errorf("unexpected type %T for field token", values[i]) @@ -99,12 +112,6 @@ func (at *AuthTokens) assignValues(columns []string, values []interface{}) error } else if value.Valid { at.ExpiresAt = value.Time } - case authtokens.FieldCreatedAt: - if value, ok := values[i].(*sql.NullTime); !ok { - return fmt.Errorf("unexpected type %T for field created_at", values[i]) - } else if value.Valid { - at.CreatedAt = value.Time - } case authtokens.ForeignKeys[0]: if value, ok := values[i].(*sql.NullScanner); !ok { return fmt.Errorf("unexpected type %T for field user_auth_tokens", values[i]) @@ -132,11 +139,11 @@ func (at *AuthTokens) Update() *AuthTokensUpdateOne { // Unwrap unwraps the AuthTokens entity that was returned from a transaction after it was closed, // so that all future queries will be executed through the driver which created the transaction. 
func (at *AuthTokens) Unwrap() *AuthTokens { - tx, ok := at.config.driver.(*txDriver) + _tx, ok := at.config.driver.(*txDriver) if !ok { panic("ent: AuthTokens is not a transactional entity") } - at.config.driver = tx.drv + at.config.driver = _tx.drv return at } @@ -144,13 +151,18 @@ func (at *AuthTokens) Unwrap() *AuthTokens { func (at *AuthTokens) String() string { var builder strings.Builder builder.WriteString("AuthTokens(") - builder.WriteString(fmt.Sprintf("id=%v", at.ID)) - builder.WriteString(", token=") - builder.WriteString(fmt.Sprintf("%v", at.Token)) - builder.WriteString(", expires_at=") - builder.WriteString(at.ExpiresAt.Format(time.ANSIC)) - builder.WriteString(", created_at=") + builder.WriteString(fmt.Sprintf("id=%v, ", at.ID)) + builder.WriteString("created_at=") builder.WriteString(at.CreatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("updated_at=") + builder.WriteString(at.UpdatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("token=") + builder.WriteString(fmt.Sprintf("%v", at.Token)) + builder.WriteString(", ") + builder.WriteString("expires_at=") + builder.WriteString(at.ExpiresAt.Format(time.ANSIC)) builder.WriteByte(')') return builder.String() } diff --git a/backend/ent/authtokens/authtokens.go b/backend/ent/authtokens/authtokens.go index 5c10d3a..af22805 100644 --- a/backend/ent/authtokens/authtokens.go +++ b/backend/ent/authtokens/authtokens.go @@ -1,9 +1,11 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package authtokens import ( "time" + + "github.com/google/uuid" ) const ( @@ -11,12 +13,14 @@ const ( Label = "auth_tokens" // FieldID holds the string denoting the id field in the database. FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // FieldUpdatedAt holds the string denoting the updated_at field in the database. 
+ FieldUpdatedAt = "updated_at" // FieldToken holds the string denoting the token field in the database. FieldToken = "token" // FieldExpiresAt holds the string denoting the expires_at field in the database. FieldExpiresAt = "expires_at" - // FieldCreatedAt holds the string denoting the created_at field in the database. - FieldCreatedAt = "created_at" // EdgeUser holds the string denoting the user edge name in mutations. EdgeUser = "user" // Table holds the table name of the authtokens in the database. @@ -33,9 +37,10 @@ const ( // Columns holds all SQL columns for authtokens fields. var Columns = []string{ FieldID, + FieldCreatedAt, + FieldUpdatedAt, FieldToken, FieldExpiresAt, - FieldCreatedAt, } // ForeignKeys holds the SQL foreign-keys that are owned by the "auth_tokens" @@ -60,8 +65,14 @@ func ValidColumn(column string) bool { } var ( - // DefaultExpiresAt holds the default value on creation for the "expires_at" field. - DefaultExpiresAt func() time.Time // DefaultCreatedAt holds the default value on creation for the "created_at" field. DefaultCreatedAt func() time.Time + // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. + DefaultUpdatedAt func() time.Time + // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. + UpdateDefaultUpdatedAt func() time.Time + // DefaultExpiresAt holds the default value on creation for the "expires_at" field. + DefaultExpiresAt func() time.Time + // DefaultID holds the default value on creation for the "id" field. + DefaultID func() uuid.UUID ) diff --git a/backend/ent/authtokens/where.go b/backend/ent/authtokens/where.go index c38121f..015b4af 100644 --- a/backend/ent/authtokens/where.go +++ b/backend/ent/authtokens/where.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. 
package authtokens @@ -7,39 +7,34 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/google/uuid" "github.com/hay-kot/content/backend/ent/predicate" ) // ID filters vertices based on their ID field. -func ID(id int) predicate.AuthTokens { +func ID(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.EQ(s.C(FieldID), id)) }) } // IDEQ applies the EQ predicate on the ID field. -func IDEQ(id int) predicate.AuthTokens { +func IDEQ(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.EQ(s.C(FieldID), id)) }) } // IDNEQ applies the NEQ predicate on the ID field. -func IDNEQ(id int) predicate.AuthTokens { +func IDNEQ(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.NEQ(s.C(FieldID), id)) }) } // IDIn applies the In predicate on the ID field. -func IDIn(ids ...int) predicate.AuthTokens { +func IDIn(ids ...uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(ids) == 0 { - s.Where(sql.False()) - return - } v := make([]interface{}, len(ids)) for i := range v { v[i] = ids[i] @@ -49,14 +44,8 @@ func IDIn(ids ...int) predicate.AuthTokens { } // IDNotIn applies the NotIn predicate on the ID field. -func IDNotIn(ids ...int) predicate.AuthTokens { +func IDNotIn(ids ...uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. 
- if len(ids) == 0 { - s.Where(sql.False()) - return - } v := make([]interface{}, len(ids)) for i := range v { v[i] = ids[i] @@ -66,33 +55,47 @@ func IDNotIn(ids ...int) predicate.AuthTokens { } // IDGT applies the GT predicate on the ID field. -func IDGT(id int) predicate.AuthTokens { +func IDGT(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.GT(s.C(FieldID), id)) }) } // IDGTE applies the GTE predicate on the ID field. -func IDGTE(id int) predicate.AuthTokens { +func IDGTE(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.GTE(s.C(FieldID), id)) }) } // IDLT applies the LT predicate on the ID field. -func IDLT(id int) predicate.AuthTokens { +func IDLT(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.LT(s.C(FieldID), id)) }) } // IDLTE applies the LTE predicate on the ID field. -func IDLTE(id int) predicate.AuthTokens { +func IDLTE(id uuid.UUID) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.LTE(s.C(FieldID), id)) }) } +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + // Token applies equality check predicate on the "token" field. It's identical to TokenEQ. func Token(v []byte) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { @@ -107,13 +110,134 @@ func ExpiresAt(v time.Time) predicate.AuthTokens { }) } -// CreatedAt applies equality check predicate on the "created_at" field. 
It's identical to CreatedAtEQ. -func CreatedAt(v time.Time) predicate.AuthTokens { +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { s.Where(sql.EQ(s.C(FieldCreatedAt), v)) }) } +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. 
+func CreatedAtLTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. +func UpdatedAtEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. +func UpdatedAtIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. +func UpdatedAtNotIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. +func UpdatedAtGT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. +func UpdatedAtGTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. 
+func UpdatedAtLT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldUpdatedAt), v)) + }) +} + // TokenEQ applies the EQ predicate on the "token" field. func TokenEQ(v []byte) predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { @@ -135,12 +259,6 @@ func TokenIn(vs ...[]byte) predicate.AuthTokens { v[i] = vs[i] } return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.In(s.C(FieldToken), v...)) }) } @@ -152,12 +270,6 @@ func TokenNotIn(vs ...[]byte) predicate.AuthTokens { v[i] = vs[i] } return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.NotIn(s.C(FieldToken), v...)) }) } @@ -211,12 +323,6 @@ func ExpiresAtIn(vs ...time.Time) predicate.AuthTokens { v[i] = vs[i] } return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.In(s.C(FieldExpiresAt), v...)) }) } @@ -228,12 +334,6 @@ func ExpiresAtNotIn(vs ...time.Time) predicate.AuthTokens { v[i] = vs[i] } return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. 
- if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.NotIn(s.C(FieldExpiresAt), v...)) }) } @@ -266,82 +366,6 @@ func ExpiresAtLTE(v time.Time) predicate.AuthTokens { }) } -// CreatedAtEQ applies the EQ predicate on the "created_at" field. -func CreatedAtEQ(v time.Time) predicate.AuthTokens { - return predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.EQ(s.C(FieldCreatedAt), v)) - }) -} - -// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. -func CreatedAtNEQ(v time.Time) predicate.AuthTokens { - return predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) - }) -} - -// CreatedAtIn applies the In predicate on the "created_at" field. -func CreatedAtIn(vs ...time.Time) predicate.AuthTokens { - v := make([]interface{}, len(vs)) - for i := range v { - v[i] = vs[i] - } - return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } - s.Where(sql.In(s.C(FieldCreatedAt), v...)) - }) -} - -// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. -func CreatedAtNotIn(vs ...time.Time) predicate.AuthTokens { - v := make([]interface{}, len(vs)) - for i := range v { - v[i] = vs[i] - } - return predicate.AuthTokens(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } - s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) - }) -} - -// CreatedAtGT applies the GT predicate on the "created_at" field. -func CreatedAtGT(v time.Time) predicate.AuthTokens { - return predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.GT(s.C(FieldCreatedAt), v)) - }) -} - -// CreatedAtGTE applies the GTE predicate on the "created_at" field. 
-func CreatedAtGTE(v time.Time) predicate.AuthTokens { - return predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.GTE(s.C(FieldCreatedAt), v)) - }) -} - -// CreatedAtLT applies the LT predicate on the "created_at" field. -func CreatedAtLT(v time.Time) predicate.AuthTokens { - return predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.LT(s.C(FieldCreatedAt), v)) - }) -} - -// CreatedAtLTE applies the LTE predicate on the "created_at" field. -func CreatedAtLTE(v time.Time) predicate.AuthTokens { - return predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.LTE(s.C(FieldCreatedAt), v)) - }) -} - // HasUser applies the HasEdge predicate on the "user" edge. func HasUser() predicate.AuthTokens { return predicate.AuthTokens(func(s *sql.Selector) { diff --git a/backend/ent/authtokens_create.go b/backend/ent/authtokens_create.go index aa64a3a..4336c2b 100644 --- a/backend/ent/authtokens_create.go +++ b/backend/ent/authtokens_create.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -22,6 +22,34 @@ type AuthTokensCreate struct { hooks []Hook } +// SetCreatedAt sets the "created_at" field. +func (atc *AuthTokensCreate) SetCreatedAt(t time.Time) *AuthTokensCreate { + atc.mutation.SetCreatedAt(t) + return atc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableCreatedAt(t *time.Time) *AuthTokensCreate { + if t != nil { + atc.SetCreatedAt(*t) + } + return atc +} + +// SetUpdatedAt sets the "updated_at" field. +func (atc *AuthTokensCreate) SetUpdatedAt(t time.Time) *AuthTokensCreate { + atc.mutation.SetUpdatedAt(t) + return atc +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableUpdatedAt(t *time.Time) *AuthTokensCreate { + if t != nil { + atc.SetUpdatedAt(*t) + } + return atc +} + // SetToken sets the "token" field. 
func (atc *AuthTokensCreate) SetToken(b []byte) *AuthTokensCreate { atc.mutation.SetToken(b) @@ -42,16 +70,16 @@ func (atc *AuthTokensCreate) SetNillableExpiresAt(t *time.Time) *AuthTokensCreat return atc } -// SetCreatedAt sets the "created_at" field. -func (atc *AuthTokensCreate) SetCreatedAt(t time.Time) *AuthTokensCreate { - atc.mutation.SetCreatedAt(t) +// SetID sets the "id" field. +func (atc *AuthTokensCreate) SetID(u uuid.UUID) *AuthTokensCreate { + atc.mutation.SetID(u) return atc } -// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. -func (atc *AuthTokensCreate) SetNillableCreatedAt(t *time.Time) *AuthTokensCreate { - if t != nil { - atc.SetCreatedAt(*t) +// SetNillableID sets the "id" field if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableID(u *uuid.UUID) *AuthTokensCreate { + if u != nil { + atc.SetID(*u) } return atc } @@ -115,9 +143,15 @@ func (atc *AuthTokensCreate) Save(ctx context.Context) (*AuthTokens, error) { } mut = atc.hooks[i](mut) } - if _, err := mut.Mutate(ctx, atc.mutation); err != nil { + v, err := mut.Mutate(ctx, atc.mutation) + if err != nil { return nil, err } + nv, ok := v.(*AuthTokens) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from AuthTokensMutation", v) + } + node = nv } return node, err } @@ -146,27 +180,38 @@ func (atc *AuthTokensCreate) ExecX(ctx context.Context) { // defaults sets the default values of the builder before save. 
func (atc *AuthTokensCreate) defaults() { + if _, ok := atc.mutation.CreatedAt(); !ok { + v := authtokens.DefaultCreatedAt() + atc.mutation.SetCreatedAt(v) + } + if _, ok := atc.mutation.UpdatedAt(); !ok { + v := authtokens.DefaultUpdatedAt() + atc.mutation.SetUpdatedAt(v) + } if _, ok := atc.mutation.ExpiresAt(); !ok { v := authtokens.DefaultExpiresAt() atc.mutation.SetExpiresAt(v) } - if _, ok := atc.mutation.CreatedAt(); !ok { - v := authtokens.DefaultCreatedAt() - atc.mutation.SetCreatedAt(v) + if _, ok := atc.mutation.ID(); !ok { + v := authtokens.DefaultID() + atc.mutation.SetID(v) } } // check runs all checks and user-defined validators on the builder. func (atc *AuthTokensCreate) check() error { + if _, ok := atc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "AuthTokens.created_at"`)} + } + if _, ok := atc.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "AuthTokens.updated_at"`)} + } if _, ok := atc.mutation.Token(); !ok { return &ValidationError{Name: "token", err: errors.New(`ent: missing required field "AuthTokens.token"`)} } if _, ok := atc.mutation.ExpiresAt(); !ok { return &ValidationError{Name: "expires_at", err: errors.New(`ent: missing required field "AuthTokens.expires_at"`)} } - if _, ok := atc.mutation.CreatedAt(); !ok { - return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "AuthTokens.created_at"`)} - } return nil } @@ -174,12 +219,17 @@ func (atc *AuthTokensCreate) sqlSave(ctx context.Context) (*AuthTokens, error) { _node, _spec := atc.createSpec() if err := sqlgraph.CreateNode(ctx, atc.driver, _spec); err != nil { if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } return nil, err } - id := _spec.ID.Value.(int64) - _node.ID = int(id) + if _spec.ID.Value != nil { + if id, ok := 
_spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } return _node, nil } @@ -189,11 +239,31 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) { _spec = &sqlgraph.CreateSpec{ Table: authtokens.Table, ID: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, } ) + if id, ok := atc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := atc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := atc.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } if value, ok := atc.mutation.Token(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeBytes, @@ -210,14 +280,6 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) { }) _node.ExpiresAt = value } - if value, ok := atc.mutation.CreatedAt(); ok { - _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ - Type: field.TypeTime, - Value: value, - Column: authtokens.FieldCreatedAt, - }) - _node.CreatedAt = value - } if nodes := atc.mutation.UserIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ Rel: sqlgraph.M2O, @@ -274,7 +336,7 @@ func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, erro // Invoke the actual operation on the latest mutation in the chain. 
if err = sqlgraph.BatchCreate(ctx, atcb.driver, spec); err != nil { if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } } } @@ -283,10 +345,6 @@ func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, erro } mutation.id = &nodes[i].ID mutation.done = true - if specs[i].ID.Value != nil { - id := specs[i].ID.Value.(int64) - nodes[i].ID = int(id) - } return nodes[i], nil }) for i := len(builder.hooks) - 1; i >= 0; i-- { diff --git a/backend/ent/authtokens_delete.go b/backend/ent/authtokens_delete.go index 9cc1ee4..682f0d5 100644 --- a/backend/ent/authtokens_delete.go +++ b/backend/ent/authtokens_delete.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -72,7 +72,7 @@ func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) { Node: &sqlgraph.NodeSpec{ Table: authtokens.Table, ID: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -84,7 +84,11 @@ func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) { } } } - return sqlgraph.DeleteNodes(ctx, atd.driver, _spec) + affected, err := sqlgraph.DeleteNodes(ctx, atd.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return affected, err } // AuthTokensDeleteOne is the builder for deleting a single AuthTokens entity. diff --git a/backend/ent/authtokens_query.go b/backend/ent/authtokens_query.go index 7c3041b..38fa9c5 100644 --- a/backend/ent/authtokens_query.go +++ b/backend/ent/authtokens_query.go @@ -1,10 +1,9 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent import ( "context" - "errors" "fmt" "math" @@ -26,9 +25,8 @@ type AuthTokensQuery struct { order []OrderFunc fields []string predicates []predicate.AuthTokens - // eager-loading edges. 
- withUser *UserQuery - withFKs bool + withUser *UserQuery + withFKs bool // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -111,8 +109,8 @@ func (atq *AuthTokensQuery) FirstX(ctx context.Context) *AuthTokens { // FirstID returns the first AuthTokens ID from the query. // Returns a *NotFoundError when no AuthTokens ID was found. -func (atq *AuthTokensQuery) FirstID(ctx context.Context) (id int, err error) { - var ids []int +func (atq *AuthTokensQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID if ids, err = atq.Limit(1).IDs(ctx); err != nil { return } @@ -124,7 +122,7 @@ func (atq *AuthTokensQuery) FirstID(ctx context.Context) (id int, err error) { } // FirstIDX is like FirstID, but panics if an error occurs. -func (atq *AuthTokensQuery) FirstIDX(ctx context.Context) int { +func (atq *AuthTokensQuery) FirstIDX(ctx context.Context) uuid.UUID { id, err := atq.FirstID(ctx) if err != nil && !IsNotFound(err) { panic(err) @@ -133,7 +131,7 @@ func (atq *AuthTokensQuery) FirstIDX(ctx context.Context) int { } // Only returns a single AuthTokens entity found by the query, ensuring it only returns one. -// Returns a *NotSingularError when exactly one AuthTokens entity is not found. +// Returns a *NotSingularError when more than one AuthTokens entity is found. // Returns a *NotFoundError when no AuthTokens entities are found. func (atq *AuthTokensQuery) Only(ctx context.Context) (*AuthTokens, error) { nodes, err := atq.Limit(2).All(ctx) @@ -160,10 +158,10 @@ func (atq *AuthTokensQuery) OnlyX(ctx context.Context) *AuthTokens { } // OnlyID is like Only, but returns the only AuthTokens ID in the query. -// Returns a *NotSingularError when exactly one AuthTokens ID is not found. +// Returns a *NotSingularError when more than one AuthTokens ID is found. // Returns a *NotFoundError when no entities are found. 
-func (atq *AuthTokensQuery) OnlyID(ctx context.Context) (id int, err error) { - var ids []int +func (atq *AuthTokensQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID if ids, err = atq.Limit(2).IDs(ctx); err != nil { return } @@ -179,7 +177,7 @@ func (atq *AuthTokensQuery) OnlyID(ctx context.Context) (id int, err error) { } // OnlyIDX is like OnlyID, but panics if an error occurs. -func (atq *AuthTokensQuery) OnlyIDX(ctx context.Context) int { +func (atq *AuthTokensQuery) OnlyIDX(ctx context.Context) uuid.UUID { id, err := atq.OnlyID(ctx) if err != nil { panic(err) @@ -205,8 +203,8 @@ func (atq *AuthTokensQuery) AllX(ctx context.Context) []*AuthTokens { } // IDs executes the query and returns a list of AuthTokens IDs. -func (atq *AuthTokensQuery) IDs(ctx context.Context) ([]int, error) { - var ids []int +func (atq *AuthTokensQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID if err := atq.Select(authtokens.FieldID).Scan(ctx, &ids); err != nil { return nil, err } @@ -214,7 +212,7 @@ func (atq *AuthTokensQuery) IDs(ctx context.Context) ([]int, error) { } // IDsX is like IDs, but panics if an error occurs. -func (atq *AuthTokensQuery) IDsX(ctx context.Context) []int { +func (atq *AuthTokensQuery) IDsX(ctx context.Context) []uuid.UUID { ids, err := atq.IDs(ctx) if err != nil { panic(err) @@ -270,8 +268,9 @@ func (atq *AuthTokensQuery) Clone() *AuthTokensQuery { predicates: append([]predicate.AuthTokens{}, atq.predicates...), withUser: atq.withUser.Clone(), // clone intermediate query. - sql: atq.sql.Clone(), - path: atq.path, + sql: atq.sql.Clone(), + path: atq.path, + unique: atq.unique, } } @@ -292,25 +291,26 @@ func (atq *AuthTokensQuery) WithUser(opts ...func(*UserQuery)) *AuthTokensQuery // Example: // // var v []struct { -// Token []byte `json:"token,omitempty"` +// CreatedAt time.Time `json:"created_at,omitempty"` // Count int `json:"count,omitempty"` // } // // client.AuthTokens.Query(). 
-// GroupBy(authtokens.FieldToken). +// GroupBy(authtokens.FieldCreatedAt). // Aggregate(ent.Count()). // Scan(ctx, &v) -// func (atq *AuthTokensQuery) GroupBy(field string, fields ...string) *AuthTokensGroupBy { - group := &AuthTokensGroupBy{config: atq.config} - group.fields = append([]string{field}, fields...) - group.path = func(ctx context.Context) (prev *sql.Selector, err error) { + grbuild := &AuthTokensGroupBy{config: atq.config} + grbuild.fields = append([]string{field}, fields...) + grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { if err := atq.prepareQuery(ctx); err != nil { return nil, err } return atq.sqlQuery(ctx), nil } - return group + grbuild.label = authtokens.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild } // Select allows the selection one or more fields/columns for the given query, @@ -319,16 +319,18 @@ func (atq *AuthTokensQuery) GroupBy(field string, fields ...string) *AuthTokensG // Example: // // var v []struct { -// Token []byte `json:"token,omitempty"` +// CreatedAt time.Time `json:"created_at,omitempty"` // } // // client.AuthTokens.Query(). -// Select(authtokens.FieldToken). +// Select(authtokens.FieldCreatedAt). // Scan(ctx, &v) -// func (atq *AuthTokensQuery) Select(fields ...string) *AuthTokensSelect { atq.fields = append(atq.fields, fields...) 
- return &AuthTokensSelect{AuthTokensQuery: atq} + selbuild := &AuthTokensSelect{AuthTokensQuery: atq} + selbuild.label = authtokens.Label + selbuild.flds, selbuild.scan = &atq.fields, selbuild.Scan + return selbuild } func (atq *AuthTokensQuery) prepareQuery(ctx context.Context) error { @@ -347,7 +349,7 @@ func (atq *AuthTokensQuery) prepareQuery(ctx context.Context) error { return nil } -func (atq *AuthTokensQuery) sqlAll(ctx context.Context) ([]*AuthTokens, error) { +func (atq *AuthTokensQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*AuthTokens, error) { var ( nodes = []*AuthTokens{} withFKs = atq.withFKs @@ -363,55 +365,60 @@ func (atq *AuthTokensQuery) sqlAll(ctx context.Context) ([]*AuthTokens, error) { _spec.Node.Columns = append(_spec.Node.Columns, authtokens.ForeignKeys...) } _spec.ScanValues = func(columns []string) ([]interface{}, error) { - node := &AuthTokens{config: atq.config} - nodes = append(nodes, node) - return node.scanValues(columns) + return (*AuthTokens).scanValues(nil, columns) } _spec.Assign = func(columns []string, values []interface{}) error { - if len(nodes) == 0 { - return fmt.Errorf("ent: Assign called without calling ScanValues") - } - node := nodes[len(nodes)-1] + node := &AuthTokens{config: atq.config} + nodes = append(nodes, node) node.Edges.loadedTypes = loadedTypes return node.assignValues(columns, values) } + for i := range hooks { + hooks[i](ctx, _spec) + } if err := sqlgraph.QueryNodes(ctx, atq.driver, _spec); err != nil { return nil, err } if len(nodes) == 0 { return nodes, nil } - if query := atq.withUser; query != nil { - ids := make([]uuid.UUID, 0, len(nodes)) - nodeids := make(map[uuid.UUID][]*AuthTokens) - for i := range nodes { - if nodes[i].user_auth_tokens == nil { - continue - } - fk := *nodes[i].user_auth_tokens - if _, ok := nodeids[fk]; !ok { - ids = append(ids, fk) - } - nodeids[fk] = append(nodeids[fk], nodes[i]) - } - query.Where(user.IDIn(ids...)) - neighbors, err := query.All(ctx) - if err != nil 
{ + if err := atq.loadUser(ctx, query, nodes, nil, + func(n *AuthTokens, e *User) { n.Edges.User = e }); err != nil { return nil, err } - for _, n := range neighbors { - nodes, ok := nodeids[n.ID] - if !ok { - return nil, fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v`, n.ID) - } - for i := range nodes { - nodes[i].Edges.User = n - } + } + return nodes, nil +} + +func (atq *AuthTokensQuery) loadUser(ctx context.Context, query *UserQuery, nodes []*AuthTokens, init func(*AuthTokens), assign func(*AuthTokens, *User)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*AuthTokens) + for i := range nodes { + if nodes[i].user_auth_tokens == nil { + continue + } + fk := *nodes[i].user_auth_tokens + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(user.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } } - - return nodes, nil + return nil } func (atq *AuthTokensQuery) sqlCount(ctx context.Context) (int, error) { @@ -437,7 +444,7 @@ func (atq *AuthTokensQuery) querySpec() *sqlgraph.QuerySpec { Table: authtokens.Table, Columns: authtokens.Columns, ID: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -514,6 +521,7 @@ func (atq *AuthTokensQuery) sqlQuery(ctx context.Context) *sql.Selector { // AuthTokensGroupBy is the group-by builder for AuthTokens entities. type AuthTokensGroupBy struct { config + selector fields []string fns []AggregateFunc // intermediate query (i.e. traversal path). 
@@ -537,209 +545,6 @@ func (atgb *AuthTokensGroupBy) Scan(ctx context.Context, v interface{}) error { return atgb.sqlScan(ctx, v) } -// ScanX is like Scan, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) ScanX(ctx context.Context, v interface{}) { - if err := atgb.Scan(ctx, v); err != nil { - panic(err) - } -} - -// Strings returns list of strings from group-by. -// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) Strings(ctx context.Context) ([]string, error) { - if len(atgb.fields) > 1 { - return nil, errors.New("ent: AuthTokensGroupBy.Strings is not achievable when grouping more than 1 field") - } - var v []string - if err := atgb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// StringsX is like Strings, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) StringsX(ctx context.Context) []string { - v, err := atgb.Strings(ctx) - if err != nil { - panic(err) - } - return v -} - -// String returns a single string from a group-by query. -// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) String(ctx context.Context) (_ string, err error) { - var v []string - if v, err = atgb.Strings(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensGroupBy.Strings returned %d results when one was expected", len(v)) - } - return -} - -// StringX is like String, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) StringX(ctx context.Context) string { - v, err := atgb.String(ctx) - if err != nil { - panic(err) - } - return v -} - -// Ints returns list of ints from group-by. -// It is only allowed when executing a group-by query with one field. 
-func (atgb *AuthTokensGroupBy) Ints(ctx context.Context) ([]int, error) { - if len(atgb.fields) > 1 { - return nil, errors.New("ent: AuthTokensGroupBy.Ints is not achievable when grouping more than 1 field") - } - var v []int - if err := atgb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// IntsX is like Ints, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) IntsX(ctx context.Context) []int { - v, err := atgb.Ints(ctx) - if err != nil { - panic(err) - } - return v -} - -// Int returns a single int from a group-by query. -// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) Int(ctx context.Context) (_ int, err error) { - var v []int - if v, err = atgb.Ints(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensGroupBy.Ints returned %d results when one was expected", len(v)) - } - return -} - -// IntX is like Int, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) IntX(ctx context.Context) int { - v, err := atgb.Int(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64s returns list of float64s from group-by. -// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) Float64s(ctx context.Context) ([]float64, error) { - if len(atgb.fields) > 1 { - return nil, errors.New("ent: AuthTokensGroupBy.Float64s is not achievable when grouping more than 1 field") - } - var v []float64 - if err := atgb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// Float64sX is like Float64s, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) Float64sX(ctx context.Context) []float64 { - v, err := atgb.Float64s(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64 returns a single float64 from a group-by query. 
-// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) Float64(ctx context.Context) (_ float64, err error) { - var v []float64 - if v, err = atgb.Float64s(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensGroupBy.Float64s returned %d results when one was expected", len(v)) - } - return -} - -// Float64X is like Float64, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) Float64X(ctx context.Context) float64 { - v, err := atgb.Float64(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bools returns list of bools from group-by. -// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) Bools(ctx context.Context) ([]bool, error) { - if len(atgb.fields) > 1 { - return nil, errors.New("ent: AuthTokensGroupBy.Bools is not achievable when grouping more than 1 field") - } - var v []bool - if err := atgb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// BoolsX is like Bools, but panics if an error occurs. -func (atgb *AuthTokensGroupBy) BoolsX(ctx context.Context) []bool { - v, err := atgb.Bools(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bool returns a single bool from a group-by query. -// It is only allowed when executing a group-by query with one field. -func (atgb *AuthTokensGroupBy) Bool(ctx context.Context) (_ bool, err error) { - var v []bool - if v, err = atgb.Bools(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensGroupBy.Bools returned %d results when one was expected", len(v)) - } - return -} - -// BoolX is like Bool, but panics if an error occurs. 
-func (atgb *AuthTokensGroupBy) BoolX(ctx context.Context) bool { - v, err := atgb.Bool(ctx) - if err != nil { - panic(err) - } - return v -} - func (atgb *AuthTokensGroupBy) sqlScan(ctx context.Context, v interface{}) error { for _, f := range atgb.fields { if !authtokens.ValidColumn(f) { @@ -781,6 +586,7 @@ func (atgb *AuthTokensGroupBy) sqlQuery() *sql.Selector { // AuthTokensSelect is the builder for selecting fields of AuthTokens entities. type AuthTokensSelect struct { *AuthTokensQuery + selector // intermediate query (i.e. traversal path). sql *sql.Selector } @@ -794,201 +600,6 @@ func (ats *AuthTokensSelect) Scan(ctx context.Context, v interface{}) error { return ats.sqlScan(ctx, v) } -// ScanX is like Scan, but panics if an error occurs. -func (ats *AuthTokensSelect) ScanX(ctx context.Context, v interface{}) { - if err := ats.Scan(ctx, v); err != nil { - panic(err) - } -} - -// Strings returns list of strings from a selector. It is only allowed when selecting one field. -func (ats *AuthTokensSelect) Strings(ctx context.Context) ([]string, error) { - if len(ats.fields) > 1 { - return nil, errors.New("ent: AuthTokensSelect.Strings is not achievable when selecting more than 1 field") - } - var v []string - if err := ats.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// StringsX is like Strings, but panics if an error occurs. -func (ats *AuthTokensSelect) StringsX(ctx context.Context) []string { - v, err := ats.Strings(ctx) - if err != nil { - panic(err) - } - return v -} - -// String returns a single string from a selector. It is only allowed when selecting one field. 
-func (ats *AuthTokensSelect) String(ctx context.Context) (_ string, err error) { - var v []string - if v, err = ats.Strings(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensSelect.Strings returned %d results when one was expected", len(v)) - } - return -} - -// StringX is like String, but panics if an error occurs. -func (ats *AuthTokensSelect) StringX(ctx context.Context) string { - v, err := ats.String(ctx) - if err != nil { - panic(err) - } - return v -} - -// Ints returns list of ints from a selector. It is only allowed when selecting one field. -func (ats *AuthTokensSelect) Ints(ctx context.Context) ([]int, error) { - if len(ats.fields) > 1 { - return nil, errors.New("ent: AuthTokensSelect.Ints is not achievable when selecting more than 1 field") - } - var v []int - if err := ats.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// IntsX is like Ints, but panics if an error occurs. -func (ats *AuthTokensSelect) IntsX(ctx context.Context) []int { - v, err := ats.Ints(ctx) - if err != nil { - panic(err) - } - return v -} - -// Int returns a single int from a selector. It is only allowed when selecting one field. -func (ats *AuthTokensSelect) Int(ctx context.Context) (_ int, err error) { - var v []int - if v, err = ats.Ints(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensSelect.Ints returned %d results when one was expected", len(v)) - } - return -} - -// IntX is like Int, but panics if an error occurs. -func (ats *AuthTokensSelect) IntX(ctx context.Context) int { - v, err := ats.Int(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. 
-func (ats *AuthTokensSelect) Float64s(ctx context.Context) ([]float64, error) { - if len(ats.fields) > 1 { - return nil, errors.New("ent: AuthTokensSelect.Float64s is not achievable when selecting more than 1 field") - } - var v []float64 - if err := ats.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// Float64sX is like Float64s, but panics if an error occurs. -func (ats *AuthTokensSelect) Float64sX(ctx context.Context) []float64 { - v, err := ats.Float64s(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. -func (ats *AuthTokensSelect) Float64(ctx context.Context) (_ float64, err error) { - var v []float64 - if v, err = ats.Float64s(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensSelect.Float64s returned %d results when one was expected", len(v)) - } - return -} - -// Float64X is like Float64, but panics if an error occurs. -func (ats *AuthTokensSelect) Float64X(ctx context.Context) float64 { - v, err := ats.Float64(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bools returns list of bools from a selector. It is only allowed when selecting one field. -func (ats *AuthTokensSelect) Bools(ctx context.Context) ([]bool, error) { - if len(ats.fields) > 1 { - return nil, errors.New("ent: AuthTokensSelect.Bools is not achievable when selecting more than 1 field") - } - var v []bool - if err := ats.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// BoolsX is like Bools, but panics if an error occurs. -func (ats *AuthTokensSelect) BoolsX(ctx context.Context) []bool { - v, err := ats.Bools(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bool returns a single bool from a selector. It is only allowed when selecting one field. 
-func (ats *AuthTokensSelect) Bool(ctx context.Context) (_ bool, err error) { - var v []bool - if v, err = ats.Bools(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{authtokens.Label} - default: - err = fmt.Errorf("ent: AuthTokensSelect.Bools returned %d results when one was expected", len(v)) - } - return -} - -// BoolX is like Bool, but panics if an error occurs. -func (ats *AuthTokensSelect) BoolX(ctx context.Context) bool { - v, err := ats.Bool(ctx) - if err != nil { - panic(err) - } - return v -} - func (ats *AuthTokensSelect) sqlScan(ctx context.Context, v interface{}) error { rows := &sql.Rows{} query, args := ats.sql.Query() diff --git a/backend/ent/authtokens_update.go b/backend/ent/authtokens_update.go index f5a99c7..0c64f16 100644 --- a/backend/ent/authtokens_update.go +++ b/backend/ent/authtokens_update.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -30,6 +30,12 @@ func (atu *AuthTokensUpdate) Where(ps ...predicate.AuthTokens) *AuthTokensUpdate return atu } +// SetUpdatedAt sets the "updated_at" field. +func (atu *AuthTokensUpdate) SetUpdatedAt(t time.Time) *AuthTokensUpdate { + atu.mutation.SetUpdatedAt(t) + return atu +} + // SetToken sets the "token" field. func (atu *AuthTokensUpdate) SetToken(b []byte) *AuthTokensUpdate { atu.mutation.SetToken(b) @@ -50,20 +56,6 @@ func (atu *AuthTokensUpdate) SetNillableExpiresAt(t *time.Time) *AuthTokensUpdat return atu } -// SetCreatedAt sets the "created_at" field. -func (atu *AuthTokensUpdate) SetCreatedAt(t time.Time) *AuthTokensUpdate { - atu.mutation.SetCreatedAt(t) - return atu -} - -// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. -func (atu *AuthTokensUpdate) SetNillableCreatedAt(t *time.Time) *AuthTokensUpdate { - if t != nil { - atu.SetCreatedAt(*t) - } - return atu -} - // SetUserID sets the "user" edge to the User entity by ID. 
func (atu *AuthTokensUpdate) SetUserID(id uuid.UUID) *AuthTokensUpdate { atu.mutation.SetUserID(id) @@ -100,6 +92,7 @@ func (atu *AuthTokensUpdate) Save(ctx context.Context) (int, error) { err error affected int ) + atu.defaults() if len(atu.hooks) == 0 { affected, err = atu.sqlSave(ctx) } else { @@ -148,13 +141,21 @@ func (atu *AuthTokensUpdate) ExecX(ctx context.Context) { } } +// defaults sets the default values of the builder before save. +func (atu *AuthTokensUpdate) defaults() { + if _, ok := atu.mutation.UpdatedAt(); !ok { + v := authtokens.UpdateDefaultUpdatedAt() + atu.mutation.SetUpdatedAt(v) + } +} + func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) { _spec := &sqlgraph.UpdateSpec{ Node: &sqlgraph.NodeSpec{ Table: authtokens.Table, Columns: authtokens.Columns, ID: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -166,6 +167,13 @@ func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) { } } } + if value, ok := atu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldUpdatedAt, + }) + } if value, ok := atu.mutation.Token(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeBytes, @@ -180,13 +188,6 @@ func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) { Column: authtokens.FieldExpiresAt, }) } - if value, ok := atu.mutation.CreatedAt(); ok { - _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ - Type: field.TypeTime, - Value: value, - Column: authtokens.FieldCreatedAt, - }) - } if atu.mutation.UserCleared() { edge := &sqlgraph.EdgeSpec{ Rel: sqlgraph.M2O, @@ -226,7 +227,7 @@ func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) { if _, ok := err.(*sqlgraph.NotFoundError); ok { err = &NotFoundError{authtokens.Label} } else if 
sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } return 0, err } @@ -241,6 +242,12 @@ type AuthTokensUpdateOne struct { mutation *AuthTokensMutation } +// SetUpdatedAt sets the "updated_at" field. +func (atuo *AuthTokensUpdateOne) SetUpdatedAt(t time.Time) *AuthTokensUpdateOne { + atuo.mutation.SetUpdatedAt(t) + return atuo +} + // SetToken sets the "token" field. func (atuo *AuthTokensUpdateOne) SetToken(b []byte) *AuthTokensUpdateOne { atuo.mutation.SetToken(b) @@ -261,20 +268,6 @@ func (atuo *AuthTokensUpdateOne) SetNillableExpiresAt(t *time.Time) *AuthTokensU return atuo } -// SetCreatedAt sets the "created_at" field. -func (atuo *AuthTokensUpdateOne) SetCreatedAt(t time.Time) *AuthTokensUpdateOne { - atuo.mutation.SetCreatedAt(t) - return atuo -} - -// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. -func (atuo *AuthTokensUpdateOne) SetNillableCreatedAt(t *time.Time) *AuthTokensUpdateOne { - if t != nil { - atuo.SetCreatedAt(*t) - } - return atuo -} - // SetUserID sets the "user" edge to the User entity by ID. 
func (atuo *AuthTokensUpdateOne) SetUserID(id uuid.UUID) *AuthTokensUpdateOne { atuo.mutation.SetUserID(id) @@ -318,6 +311,7 @@ func (atuo *AuthTokensUpdateOne) Save(ctx context.Context) (*AuthTokens, error) err error node *AuthTokens ) + atuo.defaults() if len(atuo.hooks) == 0 { node, err = atuo.sqlSave(ctx) } else { @@ -337,9 +331,15 @@ func (atuo *AuthTokensUpdateOne) Save(ctx context.Context) (*AuthTokens, error) } mut = atuo.hooks[i](mut) } - if _, err := mut.Mutate(ctx, atuo.mutation); err != nil { + v, err := mut.Mutate(ctx, atuo.mutation) + if err != nil { return nil, err } + nv, ok := v.(*AuthTokens) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from AuthTokensMutation", v) + } + node = nv } return node, err } @@ -366,13 +366,21 @@ func (atuo *AuthTokensUpdateOne) ExecX(ctx context.Context) { } } +// defaults sets the default values of the builder before save. +func (atuo *AuthTokensUpdateOne) defaults() { + if _, ok := atuo.mutation.UpdatedAt(); !ok { + v := authtokens.UpdateDefaultUpdatedAt() + atuo.mutation.SetUpdatedAt(v) + } +} + func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens, err error) { _spec := &sqlgraph.UpdateSpec{ Node: &sqlgraph.NodeSpec{ Table: authtokens.Table, Columns: authtokens.Columns, ID: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -401,6 +409,13 @@ func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens } } } + if value, ok := atuo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldUpdatedAt, + }) + } if value, ok := atuo.mutation.Token(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeBytes, @@ -415,13 +430,6 @@ func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens Column: authtokens.FieldExpiresAt, }) } - if value, ok := 
atuo.mutation.CreatedAt(); ok { - _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ - Type: field.TypeTime, - Value: value, - Column: authtokens.FieldCreatedAt, - }) - } if atuo.mutation.UserCleared() { edge := &sqlgraph.EdgeSpec{ Rel: sqlgraph.M2O, @@ -464,7 +472,7 @@ func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens if _, ok := err.(*sqlgraph.NotFoundError); ok { err = &NotFoundError{authtokens.Label} } else if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } return nil, err } diff --git a/backend/ent/client.go b/backend/ent/client.go index 2b31566..5c55b71 100644 --- a/backend/ent/client.go +++ b/backend/ent/client.go @@ -1,9 +1,10 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent import ( "context" + "errors" "fmt" "log" @@ -11,6 +12,11 @@ import ( "github.com/hay-kot/content/backend/ent/migrate" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" "github.com/hay-kot/content/backend/ent/user" "entgo.io/ent/dialect" @@ -25,6 +31,16 @@ type Client struct { Schema *migrate.Schema // AuthTokens is the client for interacting with the AuthTokens builders. AuthTokens *AuthTokensClient + // Group is the client for interacting with the Group builders. + Group *GroupClient + // Item is the client for interacting with the Item builders. + Item *ItemClient + // ItemField is the client for interacting with the ItemField builders. + ItemField *ItemFieldClient + // Label is the client for interacting with the Label builders. + Label *LabelClient + // Location is the client for interacting with the Location builders. 
+ Location *LocationClient // User is the client for interacting with the User builders. User *UserClient } @@ -41,6 +57,11 @@ func NewClient(opts ...Option) *Client { func (c *Client) init() { c.Schema = migrate.NewSchema(c.driver) c.AuthTokens = NewAuthTokensClient(c.config) + c.Group = NewGroupClient(c.config) + c.Item = NewItemClient(c.config) + c.ItemField = NewItemFieldClient(c.config) + c.Label = NewLabelClient(c.config) + c.Location = NewLocationClient(c.config) c.User = NewUserClient(c.config) } @@ -64,7 +85,7 @@ func Open(driverName, dataSourceName string, options ...Option) (*Client, error) // is used until the transaction is committed or rolled back. func (c *Client) Tx(ctx context.Context) (*Tx, error) { if _, ok := c.driver.(*txDriver); ok { - return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") + return nil, errors.New("ent: cannot start a transaction within a transaction") } tx, err := newTx(ctx, c.driver) if err != nil { @@ -76,6 +97,11 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) { ctx: ctx, config: cfg, AuthTokens: NewAuthTokensClient(cfg), + Group: NewGroupClient(cfg), + Item: NewItemClient(cfg), + ItemField: NewItemFieldClient(cfg), + Label: NewLabelClient(cfg), + Location: NewLocationClient(cfg), User: NewUserClient(cfg), }, nil } @@ -83,7 +109,7 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) { // BeginTx returns a transactional client with specified options. 
func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) { if _, ok := c.driver.(*txDriver); ok { - return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") + return nil, errors.New("ent: cannot start a transaction within a transaction") } tx, err := c.driver.(interface { BeginTx(context.Context, *sql.TxOptions) (dialect.Tx, error) @@ -97,6 +123,11 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) ctx: ctx, config: cfg, AuthTokens: NewAuthTokensClient(cfg), + Group: NewGroupClient(cfg), + Item: NewItemClient(cfg), + ItemField: NewItemFieldClient(cfg), + Label: NewLabelClient(cfg), + Location: NewLocationClient(cfg), User: NewUserClient(cfg), }, nil } @@ -107,7 +138,6 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) // AuthTokens. // Query(). // Count(ctx) -// func (c *Client) Debug() *Client { if c.debug { return c @@ -128,6 +158,11 @@ func (c *Client) Close() error { // In order to add hooks to a specific client, call: `client.Node.Use(...)`. func (c *Client) Use(hooks ...Hook) { c.AuthTokens.Use(hooks...) + c.Group.Use(hooks...) + c.Item.Use(hooks...) + c.ItemField.Use(hooks...) + c.Label.Use(hooks...) + c.Location.Use(hooks...) c.User.Use(hooks...) } @@ -147,7 +182,7 @@ func (c *AuthTokensClient) Use(hooks ...Hook) { c.hooks.AuthTokens = append(c.hooks.AuthTokens, hooks...) } -// Create returns a create builder for AuthTokens. +// Create returns a builder for creating a AuthTokens entity. func (c *AuthTokensClient) Create() *AuthTokensCreate { mutation := newAuthTokensMutation(c.config, OpCreate) return &AuthTokensCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} @@ -171,7 +206,7 @@ func (c *AuthTokensClient) UpdateOne(at *AuthTokens) *AuthTokensUpdateOne { } // UpdateOneID returns an update builder for the given id. 
-func (c *AuthTokensClient) UpdateOneID(id int) *AuthTokensUpdateOne { +func (c *AuthTokensClient) UpdateOneID(id uuid.UUID) *AuthTokensUpdateOne { mutation := newAuthTokensMutation(c.config, OpUpdateOne, withAuthTokensID(id)) return &AuthTokensUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} } @@ -182,13 +217,13 @@ func (c *AuthTokensClient) Delete() *AuthTokensDelete { return &AuthTokensDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} } -// DeleteOne returns a delete builder for the given entity. +// DeleteOne returns a builder for deleting the given entity. func (c *AuthTokensClient) DeleteOne(at *AuthTokens) *AuthTokensDeleteOne { return c.DeleteOneID(at.ID) } -// DeleteOneID returns a delete builder for the given id. -func (c *AuthTokensClient) DeleteOneID(id int) *AuthTokensDeleteOne { +// DeleteOne returns a builder for deleting the given entity by its id. +func (c *AuthTokensClient) DeleteOneID(id uuid.UUID) *AuthTokensDeleteOne { builder := c.Delete().Where(authtokens.ID(id)) builder.mutation.id = &id builder.mutation.op = OpDeleteOne @@ -203,12 +238,12 @@ func (c *AuthTokensClient) Query() *AuthTokensQuery { } // Get returns a AuthTokens entity by its id. -func (c *AuthTokensClient) Get(ctx context.Context, id int) (*AuthTokens, error) { +func (c *AuthTokensClient) Get(ctx context.Context, id uuid.UUID) (*AuthTokens, error) { return c.Query().Where(authtokens.ID(id)).Only(ctx) } // GetX is like Get, but panics if an error occurs. -func (c *AuthTokensClient) GetX(ctx context.Context, id int) *AuthTokens { +func (c *AuthTokensClient) GetX(ctx context.Context, id uuid.UUID) *AuthTokens { obj, err := c.Get(ctx, id) if err != nil { panic(err) @@ -237,6 +272,664 @@ func (c *AuthTokensClient) Hooks() []Hook { return c.hooks.AuthTokens } +// GroupClient is a client for the Group schema. +type GroupClient struct { + config +} + +// NewGroupClient returns a client for the Group from the given config. 
+func NewGroupClient(c config) *GroupClient { + return &GroupClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `group.Hooks(f(g(h())))`. +func (c *GroupClient) Use(hooks ...Hook) { + c.hooks.Group = append(c.hooks.Group, hooks...) +} + +// Create returns a builder for creating a Group entity. +func (c *GroupClient) Create() *GroupCreate { + mutation := newGroupMutation(c.config, OpCreate) + return &GroupCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of Group entities. +func (c *GroupClient) CreateBulk(builders ...*GroupCreate) *GroupCreateBulk { + return &GroupCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for Group. +func (c *GroupClient) Update() *GroupUpdate { + mutation := newGroupMutation(c.config, OpUpdate) + return &GroupUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *GroupClient) UpdateOne(gr *Group) *GroupUpdateOne { + mutation := newGroupMutation(c.config, OpUpdateOne, withGroup(gr)) + return &GroupUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *GroupClient) UpdateOneID(id uuid.UUID) *GroupUpdateOne { + mutation := newGroupMutation(c.config, OpUpdateOne, withGroupID(id)) + return &GroupUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for Group. +func (c *GroupClient) Delete() *GroupDelete { + mutation := newGroupMutation(c.config, OpDelete) + return &GroupDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. 
+func (c *GroupClient) DeleteOne(gr *Group) *GroupDeleteOne { + return c.DeleteOneID(gr.ID) +} + +// DeleteOne returns a builder for deleting the given entity by its id. +func (c *GroupClient) DeleteOneID(id uuid.UUID) *GroupDeleteOne { + builder := c.Delete().Where(group.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &GroupDeleteOne{builder} +} + +// Query returns a query builder for Group. +func (c *GroupClient) Query() *GroupQuery { + return &GroupQuery{ + config: c.config, + } +} + +// Get returns a Group entity by its id. +func (c *GroupClient) Get(ctx context.Context, id uuid.UUID) (*Group, error) { + return c.Query().Where(group.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *GroupClient) GetX(ctx context.Context, id uuid.UUID) *Group { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryUsers queries the users edge of a Group. +func (c *GroupClient) QueryUsers(gr *Group) *UserQuery { + query := &UserQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := gr.ID + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, id), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.UsersTable, group.UsersColumn), + ) + fromV = sqlgraph.Neighbors(gr.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryLocations queries the locations edge of a Group. 
+func (c *GroupClient) QueryLocations(gr *Group) *LocationQuery { + query := &LocationQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := gr.ID + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, id), + sqlgraph.To(location.Table, location.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.LocationsTable, group.LocationsColumn), + ) + fromV = sqlgraph.Neighbors(gr.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryItems queries the items edge of a Group. +func (c *GroupClient) QueryItems(gr *Group) *ItemQuery { + query := &ItemQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := gr.ID + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, id), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.ItemsTable, group.ItemsColumn), + ) + fromV = sqlgraph.Neighbors(gr.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryLabels queries the labels edge of a Group. +func (c *GroupClient) QueryLabels(gr *Group) *LabelQuery { + query := &LabelQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := gr.ID + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, id), + sqlgraph.To(label.Table, label.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.LabelsTable, group.LabelsColumn), + ) + fromV = sqlgraph.Neighbors(gr.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *GroupClient) Hooks() []Hook { + return c.hooks.Group +} + +// ItemClient is a client for the Item schema. +type ItemClient struct { + config +} + +// NewItemClient returns a client for the Item from the given config. +func NewItemClient(c config) *ItemClient { + return &ItemClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. 
+// A call to `Use(f, g, h)` equals to `item.Hooks(f(g(h())))`. +func (c *ItemClient) Use(hooks ...Hook) { + c.hooks.Item = append(c.hooks.Item, hooks...) +} + +// Create returns a builder for creating a Item entity. +func (c *ItemClient) Create() *ItemCreate { + mutation := newItemMutation(c.config, OpCreate) + return &ItemCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of Item entities. +func (c *ItemClient) CreateBulk(builders ...*ItemCreate) *ItemCreateBulk { + return &ItemCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for Item. +func (c *ItemClient) Update() *ItemUpdate { + mutation := newItemMutation(c.config, OpUpdate) + return &ItemUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *ItemClient) UpdateOne(i *Item) *ItemUpdateOne { + mutation := newItemMutation(c.config, OpUpdateOne, withItem(i)) + return &ItemUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *ItemClient) UpdateOneID(id uuid.UUID) *ItemUpdateOne { + mutation := newItemMutation(c.config, OpUpdateOne, withItemID(id)) + return &ItemUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for Item. +func (c *ItemClient) Delete() *ItemDelete { + mutation := newItemMutation(c.config, OpDelete) + return &ItemDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *ItemClient) DeleteOne(i *Item) *ItemDeleteOne { + return c.DeleteOneID(i.ID) +} + +// DeleteOne returns a builder for deleting the given entity by its id. 
+func (c *ItemClient) DeleteOneID(id uuid.UUID) *ItemDeleteOne { + builder := c.Delete().Where(item.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &ItemDeleteOne{builder} +} + +// Query returns a query builder for Item. +func (c *ItemClient) Query() *ItemQuery { + return &ItemQuery{ + config: c.config, + } +} + +// Get returns a Item entity by its id. +func (c *ItemClient) Get(ctx context.Context, id uuid.UUID) (*Item, error) { + return c.Query().Where(item.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *ItemClient) GetX(ctx context.Context, id uuid.UUID) *Item { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryGroup queries the group edge of a Item. +func (c *ItemClient) QueryGroup(i *Item) *GroupQuery { + query := &GroupQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := i.ID + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, id), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, item.GroupTable, item.GroupColumn), + ) + fromV = sqlgraph.Neighbors(i.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryLocation queries the location edge of a Item. +func (c *ItemClient) QueryLocation(i *Item) *LocationQuery { + query := &LocationQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := i.ID + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, id), + sqlgraph.To(location.Table, location.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, item.LocationTable, item.LocationColumn), + ) + fromV = sqlgraph.Neighbors(i.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryFields queries the fields edge of a Item. 
+func (c *ItemClient) QueryFields(i *Item) *ItemFieldQuery { + query := &ItemFieldQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := i.ID + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, id), + sqlgraph.To(itemfield.Table, itemfield.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, item.FieldsTable, item.FieldsColumn), + ) + fromV = sqlgraph.Neighbors(i.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryLabel queries the label edge of a Item. +func (c *ItemClient) QueryLabel(i *Item) *LabelQuery { + query := &LabelQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := i.ID + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, id), + sqlgraph.To(label.Table, label.FieldID), + sqlgraph.Edge(sqlgraph.M2M, true, item.LabelTable, item.LabelPrimaryKey...), + ) + fromV = sqlgraph.Neighbors(i.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *ItemClient) Hooks() []Hook { + return c.hooks.Item +} + +// ItemFieldClient is a client for the ItemField schema. +type ItemFieldClient struct { + config +} + +// NewItemFieldClient returns a client for the ItemField from the given config. +func NewItemFieldClient(c config) *ItemFieldClient { + return &ItemFieldClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `itemfield.Hooks(f(g(h())))`. +func (c *ItemFieldClient) Use(hooks ...Hook) { + c.hooks.ItemField = append(c.hooks.ItemField, hooks...) +} + +// Create returns a builder for creating a ItemField entity. +func (c *ItemFieldClient) Create() *ItemFieldCreate { + mutation := newItemFieldMutation(c.config, OpCreate) + return &ItemFieldCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of ItemField entities. 
+func (c *ItemFieldClient) CreateBulk(builders ...*ItemFieldCreate) *ItemFieldCreateBulk { + return &ItemFieldCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for ItemField. +func (c *ItemFieldClient) Update() *ItemFieldUpdate { + mutation := newItemFieldMutation(c.config, OpUpdate) + return &ItemFieldUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *ItemFieldClient) UpdateOne(_if *ItemField) *ItemFieldUpdateOne { + mutation := newItemFieldMutation(c.config, OpUpdateOne, withItemField(_if)) + return &ItemFieldUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *ItemFieldClient) UpdateOneID(id uuid.UUID) *ItemFieldUpdateOne { + mutation := newItemFieldMutation(c.config, OpUpdateOne, withItemFieldID(id)) + return &ItemFieldUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for ItemField. +func (c *ItemFieldClient) Delete() *ItemFieldDelete { + mutation := newItemFieldMutation(c.config, OpDelete) + return &ItemFieldDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *ItemFieldClient) DeleteOne(_if *ItemField) *ItemFieldDeleteOne { + return c.DeleteOneID(_if.ID) +} + +// DeleteOne returns a builder for deleting the given entity by its id. +func (c *ItemFieldClient) DeleteOneID(id uuid.UUID) *ItemFieldDeleteOne { + builder := c.Delete().Where(itemfield.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &ItemFieldDeleteOne{builder} +} + +// Query returns a query builder for ItemField. +func (c *ItemFieldClient) Query() *ItemFieldQuery { + return &ItemFieldQuery{ + config: c.config, + } +} + +// Get returns a ItemField entity by its id. 
+func (c *ItemFieldClient) Get(ctx context.Context, id uuid.UUID) (*ItemField, error) { + return c.Query().Where(itemfield.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *ItemFieldClient) GetX(ctx context.Context, id uuid.UUID) *ItemField { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryItem queries the item edge of a ItemField. +func (c *ItemFieldClient) QueryItem(_if *ItemField) *ItemQuery { + query := &ItemQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := _if.ID + step := sqlgraph.NewStep( + sqlgraph.From(itemfield.Table, itemfield.FieldID, id), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, itemfield.ItemTable, itemfield.ItemColumn), + ) + fromV = sqlgraph.Neighbors(_if.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *ItemFieldClient) Hooks() []Hook { + return c.hooks.ItemField +} + +// LabelClient is a client for the Label schema. +type LabelClient struct { + config +} + +// NewLabelClient returns a client for the Label from the given config. +func NewLabelClient(c config) *LabelClient { + return &LabelClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `label.Hooks(f(g(h())))`. +func (c *LabelClient) Use(hooks ...Hook) { + c.hooks.Label = append(c.hooks.Label, hooks...) +} + +// Create returns a builder for creating a Label entity. +func (c *LabelClient) Create() *LabelCreate { + mutation := newLabelMutation(c.config, OpCreate) + return &LabelCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of Label entities. 
+func (c *LabelClient) CreateBulk(builders ...*LabelCreate) *LabelCreateBulk { + return &LabelCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for Label. +func (c *LabelClient) Update() *LabelUpdate { + mutation := newLabelMutation(c.config, OpUpdate) + return &LabelUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *LabelClient) UpdateOne(l *Label) *LabelUpdateOne { + mutation := newLabelMutation(c.config, OpUpdateOne, withLabel(l)) + return &LabelUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *LabelClient) UpdateOneID(id uuid.UUID) *LabelUpdateOne { + mutation := newLabelMutation(c.config, OpUpdateOne, withLabelID(id)) + return &LabelUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for Label. +func (c *LabelClient) Delete() *LabelDelete { + mutation := newLabelMutation(c.config, OpDelete) + return &LabelDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *LabelClient) DeleteOne(l *Label) *LabelDeleteOne { + return c.DeleteOneID(l.ID) +} + +// DeleteOne returns a builder for deleting the given entity by its id. +func (c *LabelClient) DeleteOneID(id uuid.UUID) *LabelDeleteOne { + builder := c.Delete().Where(label.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &LabelDeleteOne{builder} +} + +// Query returns a query builder for Label. +func (c *LabelClient) Query() *LabelQuery { + return &LabelQuery{ + config: c.config, + } +} + +// Get returns a Label entity by its id. +func (c *LabelClient) Get(ctx context.Context, id uuid.UUID) (*Label, error) { + return c.Query().Where(label.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. 
+func (c *LabelClient) GetX(ctx context.Context, id uuid.UUID) *Label { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryGroup queries the group edge of a Label. +func (c *LabelClient) QueryGroup(l *Label) *GroupQuery { + query := &GroupQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := l.ID + step := sqlgraph.NewStep( + sqlgraph.From(label.Table, label.FieldID, id), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, label.GroupTable, label.GroupColumn), + ) + fromV = sqlgraph.Neighbors(l.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryItems queries the items edge of a Label. +func (c *LabelClient) QueryItems(l *Label) *ItemQuery { + query := &ItemQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := l.ID + step := sqlgraph.NewStep( + sqlgraph.From(label.Table, label.FieldID, id), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.M2M, false, label.ItemsTable, label.ItemsPrimaryKey...), + ) + fromV = sqlgraph.Neighbors(l.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *LabelClient) Hooks() []Hook { + return c.hooks.Label +} + +// LocationClient is a client for the Location schema. +type LocationClient struct { + config +} + +// NewLocationClient returns a client for the Location from the given config. +func NewLocationClient(c config) *LocationClient { + return &LocationClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `location.Hooks(f(g(h())))`. +func (c *LocationClient) Use(hooks ...Hook) { + c.hooks.Location = append(c.hooks.Location, hooks...) +} + +// Create returns a builder for creating a Location entity. 
+func (c *LocationClient) Create() *LocationCreate { + mutation := newLocationMutation(c.config, OpCreate) + return &LocationCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of Location entities. +func (c *LocationClient) CreateBulk(builders ...*LocationCreate) *LocationCreateBulk { + return &LocationCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for Location. +func (c *LocationClient) Update() *LocationUpdate { + mutation := newLocationMutation(c.config, OpUpdate) + return &LocationUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *LocationClient) UpdateOne(l *Location) *LocationUpdateOne { + mutation := newLocationMutation(c.config, OpUpdateOne, withLocation(l)) + return &LocationUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *LocationClient) UpdateOneID(id uuid.UUID) *LocationUpdateOne { + mutation := newLocationMutation(c.config, OpUpdateOne, withLocationID(id)) + return &LocationUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for Location. +func (c *LocationClient) Delete() *LocationDelete { + mutation := newLocationMutation(c.config, OpDelete) + return &LocationDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *LocationClient) DeleteOne(l *Location) *LocationDeleteOne { + return c.DeleteOneID(l.ID) +} + +// DeleteOne returns a builder for deleting the given entity by its id. 
+func (c *LocationClient) DeleteOneID(id uuid.UUID) *LocationDeleteOne { + builder := c.Delete().Where(location.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &LocationDeleteOne{builder} +} + +// Query returns a query builder for Location. +func (c *LocationClient) Query() *LocationQuery { + return &LocationQuery{ + config: c.config, + } +} + +// Get returns a Location entity by its id. +func (c *LocationClient) Get(ctx context.Context, id uuid.UUID) (*Location, error) { + return c.Query().Where(location.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *LocationClient) GetX(ctx context.Context, id uuid.UUID) *Location { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryGroup queries the group edge of a Location. +func (c *LocationClient) QueryGroup(l *Location) *GroupQuery { + query := &GroupQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := l.ID + step := sqlgraph.NewStep( + sqlgraph.From(location.Table, location.FieldID, id), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, location.GroupTable, location.GroupColumn), + ) + fromV = sqlgraph.Neighbors(l.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryItems queries the items edge of a Location. +func (c *LocationClient) QueryItems(l *Location) *ItemQuery { + query := &ItemQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := l.ID + step := sqlgraph.NewStep( + sqlgraph.From(location.Table, location.FieldID, id), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, location.ItemsTable, location.ItemsColumn), + ) + fromV = sqlgraph.Neighbors(l.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. 
+func (c *LocationClient) Hooks() []Hook { + return c.hooks.Location +} + // UserClient is a client for the User schema. type UserClient struct { config @@ -253,7 +946,7 @@ func (c *UserClient) Use(hooks ...Hook) { c.hooks.User = append(c.hooks.User, hooks...) } -// Create returns a create builder for User. +// Create returns a builder for creating a User entity. func (c *UserClient) Create() *UserCreate { mutation := newUserMutation(c.config, OpCreate) return &UserCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} @@ -288,12 +981,12 @@ func (c *UserClient) Delete() *UserDelete { return &UserDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} } -// DeleteOne returns a delete builder for the given entity. +// DeleteOne returns a builder for deleting the given entity. func (c *UserClient) DeleteOne(u *User) *UserDeleteOne { return c.DeleteOneID(u.ID) } -// DeleteOneID returns a delete builder for the given id. +// DeleteOne returns a builder for deleting the given entity by its id. func (c *UserClient) DeleteOneID(id uuid.UUID) *UserDeleteOne { builder := c.Delete().Where(user.ID(id)) builder.mutation.id = &id @@ -322,6 +1015,22 @@ func (c *UserClient) GetX(ctx context.Context, id uuid.UUID) *User { return obj } +// QueryGroup queries the group edge of a User. +func (c *UserClient) QueryGroup(u *User) *GroupQuery { + query := &GroupQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := u.ID + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, id), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, user.GroupTable, user.GroupColumn), + ) + fromV = sqlgraph.Neighbors(u.driver.Dialect(), step) + return fromV, nil + } + return query +} + // QueryAuthTokens queries the auth_tokens edge of a User. 
func (c *UserClient) QueryAuthTokens(u *User) *AuthTokensQuery { query := &AuthTokensQuery{config: c.config} diff --git a/backend/ent/config.go b/backend/ent/config.go index 550e16e..b0dbf9f 100644 --- a/backend/ent/config.go +++ b/backend/ent/config.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -25,6 +25,11 @@ type config struct { // hooks per client, for fast access. type hooks struct { AuthTokens []ent.Hook + Group []ent.Hook + Item []ent.Hook + ItemField []ent.Hook + Label []ent.Hook + Location []ent.Hook User []ent.Hook } diff --git a/backend/ent/context.go b/backend/ent/context.go index 0840726..7811bfa 100644 --- a/backend/ent/context.go +++ b/backend/ent/context.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent diff --git a/backend/ent/ent.go b/backend/ent/ent.go index 9e5990e..f976756 100644 --- a/backend/ent/ent.go +++ b/backend/ent/ent.go @@ -1,14 +1,21 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. 
package ent import ( + "context" "errors" "fmt" "entgo.io/ent" "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" "github.com/hay-kot/content/backend/ent/user" ) @@ -31,6 +38,11 @@ type OrderFunc func(*sql.Selector) func columnChecker(table string) func(string) error { checks := map[string]func(string) bool{ authtokens.Table: authtokens.ValidColumn, + group.Table: group.ValidColumn, + item.Table: item.ValidColumn, + itemfield.Table: itemfield.ValidColumn, + label.Table: label.ValidColumn, + location.Table: location.ValidColumn, user.Table: user.ValidColumn, } check, ok := checks[table] @@ -81,7 +93,6 @@ type AggregateFunc func(*sql.Selector) string // GroupBy(field1, field2). // Aggregate(ent.As(ent.Sum(field1), "sum_field1"), (ent.As(ent.Sum(field2), "sum_field2")). // Scan(ctx, &v) -// func As(fn AggregateFunc, end string) AggregateFunc { return func(s *sql.Selector) string { return sql.As(fn(s), end) @@ -259,3 +270,208 @@ func IsConstraintError(err error) bool { var e *ConstraintError return errors.As(err, &e) } + +// selector embedded by the different Select/GroupBy builders. +type selector struct { + label string + flds *[]string + scan func(context.Context, interface{}) error +} + +// ScanX is like Scan, but panics if an error occurs. +func (s *selector) ScanX(ctx context.Context, v interface{}) { + if err := s.scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from a selector. It is only allowed when selecting one field. 
+func (s *selector) Strings(ctx context.Context) ([]string, error) { + if len(*s.flds) > 1 { + return nil, errors.New("ent: Strings is not achievable when selecting more than 1 field") + } + var v []string + if err := s.scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (s *selector) StringsX(ctx context.Context) []string { + v, err := s.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a selector. It is only allowed when selecting one field. +func (s *selector) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = s.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{s.label} + default: + err = fmt.Errorf("ent: Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (s *selector) StringX(ctx context.Context) string { + v, err := s.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from a selector. It is only allowed when selecting one field. +func (s *selector) Ints(ctx context.Context) ([]int, error) { + if len(*s.flds) > 1 { + return nil, errors.New("ent: Ints is not achievable when selecting more than 1 field") + } + var v []int + if err := s.scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (s *selector) IntsX(ctx context.Context) []int { + v, err := s.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a selector. It is only allowed when selecting one field. 
+func (s *selector) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = s.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{s.label} + default: + err = fmt.Errorf("ent: Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (s *selector) IntX(ctx context.Context) int { + v, err := s.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. +func (s *selector) Float64s(ctx context.Context) ([]float64, error) { + if len(*s.flds) > 1 { + return nil, errors.New("ent: Float64s is not achievable when selecting more than 1 field") + } + var v []float64 + if err := s.scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (s *selector) Float64sX(ctx context.Context) []float64 { + v, err := s.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. +func (s *selector) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = s.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{s.label} + default: + err = fmt.Errorf("ent: Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (s *selector) Float64X(ctx context.Context) float64 { + v, err := s.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from a selector. It is only allowed when selecting one field. 
+func (s *selector) Bools(ctx context.Context) ([]bool, error) { + if len(*s.flds) > 1 { + return nil, errors.New("ent: Bools is not achievable when selecting more than 1 field") + } + var v []bool + if err := s.scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. +func (s *selector) BoolsX(ctx context.Context) []bool { + v, err := s.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a selector. It is only allowed when selecting one field. +func (s *selector) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = s.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{s.label} + default: + err = fmt.Errorf("ent: Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (s *selector) BoolX(ctx context.Context) bool { + v, err := s.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +// queryHook describes an internal hook for the different sqlAll methods. +type queryHook func(context.Context, *sqlgraph.QuerySpec) diff --git a/backend/ent/enttest/enttest.go b/backend/ent/enttest/enttest.go index 4c398dc..b14ee60 100644 --- a/backend/ent/enttest/enttest.go +++ b/backend/ent/enttest/enttest.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. 
package enttest @@ -10,6 +10,7 @@ import ( _ "github.com/hay-kot/content/backend/ent/runtime" "entgo.io/ent/dialect/sql/schema" + "github.com/hay-kot/content/backend/ent/migrate" ) type ( @@ -59,10 +60,7 @@ func Open(t TestingT, driverName, dataSourceName string, opts ...Option) *ent.Cl t.Error(err) t.FailNow() } - if err := c.Schema.Create(context.Background(), o.migrateOpts...); err != nil { - t.Error(err) - t.FailNow() - } + migrateSchema(t, c, o) return c } @@ -70,9 +68,17 @@ func Open(t TestingT, driverName, dataSourceName string, opts ...Option) *ent.Cl func NewClient(t TestingT, opts ...Option) *ent.Client { o := newOptions(opts) c := ent.NewClient(o.opts...) - if err := c.Schema.Create(context.Background(), o.migrateOpts...); err != nil { + migrateSchema(t, c, o) + return c +} +func migrateSchema(t TestingT, c *ent.Client, o *options) { + tables, err := schema.CopyTables(migrate.Tables) + if err != nil { + t.Error(err) + t.FailNow() + } + if err := migrate.Create(context.Background(), c.Schema, tables, o.migrateOpts...); err != nil { t.Error(err) t.FailNow() } - return c } diff --git a/backend/ent/group.go b/backend/ent/group.go new file mode 100644 index 0000000..6724ec5 --- /dev/null +++ b/backend/ent/group.go @@ -0,0 +1,210 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" +) + +// Group is the model entity for the Group schema. +type Group struct { + config `json:"-"` + // ID of the ent. + ID uuid.UUID `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` + // Name holds the value of the "name" field. + Name string `json:"name,omitempty"` + // Currency holds the value of the "currency" field. 
+ Currency group.Currency `json:"currency,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the GroupQuery when eager-loading is set. + Edges GroupEdges `json:"edges"` +} + +// GroupEdges holds the relations/edges for other nodes in the graph. +type GroupEdges struct { + // Users holds the value of the users edge. + Users []*User `json:"users,omitempty"` + // Locations holds the value of the locations edge. + Locations []*Location `json:"locations,omitempty"` + // Items holds the value of the items edge. + Items []*Item `json:"items,omitempty"` + // Labels holds the value of the labels edge. + Labels []*Label `json:"labels,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [4]bool +} + +// UsersOrErr returns the Users value or an error if the edge +// was not loaded in eager-loading. +func (e GroupEdges) UsersOrErr() ([]*User, error) { + if e.loadedTypes[0] { + return e.Users, nil + } + return nil, &NotLoadedError{edge: "users"} +} + +// LocationsOrErr returns the Locations value or an error if the edge +// was not loaded in eager-loading. +func (e GroupEdges) LocationsOrErr() ([]*Location, error) { + if e.loadedTypes[1] { + return e.Locations, nil + } + return nil, &NotLoadedError{edge: "locations"} +} + +// ItemsOrErr returns the Items value or an error if the edge +// was not loaded in eager-loading. +func (e GroupEdges) ItemsOrErr() ([]*Item, error) { + if e.loadedTypes[2] { + return e.Items, nil + } + return nil, &NotLoadedError{edge: "items"} +} + +// LabelsOrErr returns the Labels value or an error if the edge +// was not loaded in eager-loading. +func (e GroupEdges) LabelsOrErr() ([]*Label, error) { + if e.loadedTypes[3] { + return e.Labels, nil + } + return nil, &NotLoadedError{edge: "labels"} +} + +// scanValues returns the types for scanning values from sql.Rows. 
+func (*Group) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case group.FieldName, group.FieldCurrency: + values[i] = new(sql.NullString) + case group.FieldCreatedAt, group.FieldUpdatedAt: + values[i] = new(sql.NullTime) + case group.FieldID: + values[i] = new(uuid.UUID) + default: + return nil, fmt.Errorf("unexpected column %q for type Group", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the Group fields. +func (gr *Group) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case group.FieldID: + if value, ok := values[i].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field id", values[i]) + } else if value != nil { + gr.ID = *value + } + case group.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + gr.CreatedAt = value.Time + } + case group.FieldUpdatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[i]) + } else if value.Valid { + gr.UpdatedAt = value.Time + } + case group.FieldName: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field name", values[i]) + } else if value.Valid { + gr.Name = value.String + } + case group.FieldCurrency: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field currency", values[i]) + } else if value.Valid { + gr.Currency = group.Currency(value.String) + } + } + } + return nil +} + +// QueryUsers queries the "users" edge of the Group entity. 
+func (gr *Group) QueryUsers() *UserQuery { + return (&GroupClient{config: gr.config}).QueryUsers(gr) +} + +// QueryLocations queries the "locations" edge of the Group entity. +func (gr *Group) QueryLocations() *LocationQuery { + return (&GroupClient{config: gr.config}).QueryLocations(gr) +} + +// QueryItems queries the "items" edge of the Group entity. +func (gr *Group) QueryItems() *ItemQuery { + return (&GroupClient{config: gr.config}).QueryItems(gr) +} + +// QueryLabels queries the "labels" edge of the Group entity. +func (gr *Group) QueryLabels() *LabelQuery { + return (&GroupClient{config: gr.config}).QueryLabels(gr) +} + +// Update returns a builder for updating this Group. +// Note that you need to call Group.Unwrap() before calling this method if this Group +// was returned from a transaction, and the transaction was committed or rolled back. +func (gr *Group) Update() *GroupUpdateOne { + return (&GroupClient{config: gr.config}).UpdateOne(gr) +} + +// Unwrap unwraps the Group entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (gr *Group) Unwrap() *Group { + _tx, ok := gr.config.driver.(*txDriver) + if !ok { + panic("ent: Group is not a transactional entity") + } + gr.config.driver = _tx.drv + return gr +} + +// String implements the fmt.Stringer. 
+func (gr *Group) String() string { + var builder strings.Builder + builder.WriteString("Group(") + builder.WriteString(fmt.Sprintf("id=%v, ", gr.ID)) + builder.WriteString("created_at=") + builder.WriteString(gr.CreatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("updated_at=") + builder.WriteString(gr.UpdatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("name=") + builder.WriteString(gr.Name) + builder.WriteString(", ") + builder.WriteString("currency=") + builder.WriteString(fmt.Sprintf("%v", gr.Currency)) + builder.WriteByte(')') + return builder.String() +} + +// Groups is a parsable slice of Group. +type Groups []*Group + +func (gr Groups) config(cfg config) { + for _i := range gr { + gr[_i].config = cfg + } +} diff --git a/backend/ent/group/group.go b/backend/ent/group/group.go new file mode 100644 index 0000000..c7ccf72 --- /dev/null +++ b/backend/ent/group/group.go @@ -0,0 +1,120 @@ +// Code generated by ent, DO NOT EDIT. + +package group + +import ( + "fmt" + "time" + + "github.com/google/uuid" +) + +const ( + // Label holds the string label denoting the group type in the database. + Label = "group" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // FieldUpdatedAt holds the string denoting the updated_at field in the database. + FieldUpdatedAt = "updated_at" + // FieldName holds the string denoting the name field in the database. + FieldName = "name" + // FieldCurrency holds the string denoting the currency field in the database. + FieldCurrency = "currency" + // EdgeUsers holds the string denoting the users edge name in mutations. + EdgeUsers = "users" + // EdgeLocations holds the string denoting the locations edge name in mutations. + EdgeLocations = "locations" + // EdgeItems holds the string denoting the items edge name in mutations. 
+ EdgeItems = "items" + // EdgeLabels holds the string denoting the labels edge name in mutations. + EdgeLabels = "labels" + // Table holds the table name of the group in the database. + Table = "groups" + // UsersTable is the table that holds the users relation/edge. + UsersTable = "users" + // UsersInverseTable is the table name for the User entity. + // It exists in this package in order to avoid circular dependency with the "user" package. + UsersInverseTable = "users" + // UsersColumn is the table column denoting the users relation/edge. + UsersColumn = "group_users" + // LocationsTable is the table that holds the locations relation/edge. + LocationsTable = "locations" + // LocationsInverseTable is the table name for the Location entity. + // It exists in this package in order to avoid circular dependency with the "location" package. + LocationsInverseTable = "locations" + // LocationsColumn is the table column denoting the locations relation/edge. + LocationsColumn = "group_locations" + // ItemsTable is the table that holds the items relation/edge. + ItemsTable = "items" + // ItemsInverseTable is the table name for the Item entity. + // It exists in this package in order to avoid circular dependency with the "item" package. + ItemsInverseTable = "items" + // ItemsColumn is the table column denoting the items relation/edge. + ItemsColumn = "group_items" + // LabelsTable is the table that holds the labels relation/edge. + LabelsTable = "labels" + // LabelsInverseTable is the table name for the Label entity. + // It exists in this package in order to avoid circular dependency with the "label" package. + LabelsInverseTable = "labels" + // LabelsColumn is the table column denoting the labels relation/edge. + LabelsColumn = "group_labels" +) + +// Columns holds all SQL columns for group fields. 
+var Columns = []string{ + FieldID, + FieldCreatedAt, + FieldUpdatedAt, + FieldName, + FieldCurrency, +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + return false +} + +var ( + // DefaultCreatedAt holds the default value on creation for the "created_at" field. + DefaultCreatedAt func() time.Time + // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. + DefaultUpdatedAt func() time.Time + // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. + UpdateDefaultUpdatedAt func() time.Time + // NameValidator is a validator for the "name" field. It is called by the builders before save. + NameValidator func(string) error + // DefaultID holds the default value on creation for the "id" field. + DefaultID func() uuid.UUID +) + +// Currency defines the type for the "currency" enum field. +type Currency string + +// CurrencyUsd is the default value of the Currency enum. +const DefaultCurrency = CurrencyUsd + +// Currency values. +const ( + CurrencyUsd Currency = "usd" +) + +func (c Currency) String() string { + return string(c) +} + +// CurrencyValidator is a validator for the "currency" field enum values. It is called by the builders before save. +func CurrencyValidator(c Currency) error { + switch c { + case CurrencyUsd: + return nil + default: + return fmt.Errorf("group: invalid enum value for currency field: %q", c) + } +} diff --git a/backend/ent/group/where.go b/backend/ent/group/where.go new file mode 100644 index 0000000..35e3c7d --- /dev/null +++ b/backend/ent/group/where.go @@ -0,0 +1,511 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package group + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. 
+func IDLTE(id uuid.UUID) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// Name applies equality check predicate on the "name" field. It's identical to NameEQ. +func Name(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. 
+func CreatedAtGT(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. +func UpdatedAtEQ(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. +func UpdatedAtIn(vs ...time.Time) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. +func UpdatedAtNotIn(vs ...time.Time) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. 
+func UpdatedAtGT(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. +func UpdatedAtGTE(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. +func UpdatedAtLT(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldUpdatedAt), v)) + }) +} + +// NameEQ applies the EQ predicate on the "name" field. +func NameEQ(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// NameNEQ applies the NEQ predicate on the "name" field. +func NameNEQ(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldName), v)) + }) +} + +// NameIn applies the In predicate on the "name" field. +func NameIn(vs ...string) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldName), v...)) + }) +} + +// NameNotIn applies the NotIn predicate on the "name" field. +func NameNotIn(vs ...string) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldName), v...)) + }) +} + +// NameGT applies the GT predicate on the "name" field. 
+func NameGT(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldName), v)) + }) +} + +// NameGTE applies the GTE predicate on the "name" field. +func NameGTE(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldName), v)) + }) +} + +// NameLT applies the LT predicate on the "name" field. +func NameLT(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldName), v)) + }) +} + +// NameLTE applies the LTE predicate on the "name" field. +func NameLTE(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldName), v)) + }) +} + +// NameContains applies the Contains predicate on the "name" field. +func NameContains(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldName), v)) + }) +} + +// NameHasPrefix applies the HasPrefix predicate on the "name" field. +func NameHasPrefix(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldName), v)) + }) +} + +// NameHasSuffix applies the HasSuffix predicate on the "name" field. +func NameHasSuffix(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldName), v)) + }) +} + +// NameEqualFold applies the EqualFold predicate on the "name" field. +func NameEqualFold(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldName), v)) + }) +} + +// NameContainsFold applies the ContainsFold predicate on the "name" field. +func NameContainsFold(v string) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldName), v)) + }) +} + +// CurrencyEQ applies the EQ predicate on the "currency" field. 
+func CurrencyEQ(v Currency) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCurrency), v)) + }) +} + +// CurrencyNEQ applies the NEQ predicate on the "currency" field. +func CurrencyNEQ(v Currency) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCurrency), v)) + }) +} + +// CurrencyIn applies the In predicate on the "currency" field. +func CurrencyIn(vs ...Currency) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCurrency), v...)) + }) +} + +// CurrencyNotIn applies the NotIn predicate on the "currency" field. +func CurrencyNotIn(vs ...Currency) predicate.Group { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Group(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCurrency), v...)) + }) +} + +// HasUsers applies the HasEdge predicate on the "users" edge. +func HasUsers() predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UsersTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, UsersTable, UsersColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasUsersWith applies the HasEdge predicate on the "users" edge with a given conditions (other predicates). +func HasUsersWith(preds ...predicate.User) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UsersInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, UsersTable, UsersColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasLocations applies the HasEdge predicate on the "locations" edge. 
+func HasLocations() predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LocationsTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, LocationsTable, LocationsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasLocationsWith applies the HasEdge predicate on the "locations" edge with a given conditions (other predicates). +func HasLocationsWith(preds ...predicate.Location) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LocationsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, LocationsTable, LocationsColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasItems applies the HasEdge predicate on the "items" edge. +func HasItems() predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemsTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, ItemsTable, ItemsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasItemsWith applies the HasEdge predicate on the "items" edge with a given conditions (other predicates). +func HasItemsWith(preds ...predicate.Item) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, ItemsTable, ItemsColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasLabels applies the HasEdge predicate on the "labels" edge. 
+func HasLabels() predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LabelsTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, LabelsTable, LabelsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasLabelsWith applies the HasEdge predicate on the "labels" edge with a given conditions (other predicates). +func HasLabelsWith(preds ...predicate.Label) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LabelsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, LabelsTable, LabelsColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.Group) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.Group) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.Group) predicate.Group { + return predicate.Group(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/group_create.go b/backend/ent/group_create.go new file mode 100644 index 0000000..eda86d6 --- /dev/null +++ b/backend/ent/group_create.go @@ -0,0 +1,494 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/user" +) + +// GroupCreate is the builder for creating a Group entity. +type GroupCreate struct { + config + mutation *GroupMutation + hooks []Hook +} + +// SetCreatedAt sets the "created_at" field. +func (gc *GroupCreate) SetCreatedAt(t time.Time) *GroupCreate { + gc.mutation.SetCreatedAt(t) + return gc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (gc *GroupCreate) SetNillableCreatedAt(t *time.Time) *GroupCreate { + if t != nil { + gc.SetCreatedAt(*t) + } + return gc +} + +// SetUpdatedAt sets the "updated_at" field. +func (gc *GroupCreate) SetUpdatedAt(t time.Time) *GroupCreate { + gc.mutation.SetUpdatedAt(t) + return gc +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (gc *GroupCreate) SetNillableUpdatedAt(t *time.Time) *GroupCreate { + if t != nil { + gc.SetUpdatedAt(*t) + } + return gc +} + +// SetName sets the "name" field. +func (gc *GroupCreate) SetName(s string) *GroupCreate { + gc.mutation.SetName(s) + return gc +} + +// SetCurrency sets the "currency" field. +func (gc *GroupCreate) SetCurrency(gr group.Currency) *GroupCreate { + gc.mutation.SetCurrency(gr) + return gc +} + +// SetNillableCurrency sets the "currency" field if the given value is not nil. +func (gc *GroupCreate) SetNillableCurrency(gr *group.Currency) *GroupCreate { + if gr != nil { + gc.SetCurrency(*gr) + } + return gc +} + +// SetID sets the "id" field. 
+func (gc *GroupCreate) SetID(u uuid.UUID) *GroupCreate { + gc.mutation.SetID(u) + return gc +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (gc *GroupCreate) SetNillableID(u *uuid.UUID) *GroupCreate { + if u != nil { + gc.SetID(*u) + } + return gc +} + +// AddUserIDs adds the "users" edge to the User entity by IDs. +func (gc *GroupCreate) AddUserIDs(ids ...uuid.UUID) *GroupCreate { + gc.mutation.AddUserIDs(ids...) + return gc +} + +// AddUsers adds the "users" edges to the User entity. +func (gc *GroupCreate) AddUsers(u ...*User) *GroupCreate { + ids := make([]uuid.UUID, len(u)) + for i := range u { + ids[i] = u[i].ID + } + return gc.AddUserIDs(ids...) +} + +// AddLocationIDs adds the "locations" edge to the Location entity by IDs. +func (gc *GroupCreate) AddLocationIDs(ids ...uuid.UUID) *GroupCreate { + gc.mutation.AddLocationIDs(ids...) + return gc +} + +// AddLocations adds the "locations" edges to the Location entity. +func (gc *GroupCreate) AddLocations(l ...*Location) *GroupCreate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return gc.AddLocationIDs(ids...) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (gc *GroupCreate) AddItemIDs(ids ...uuid.UUID) *GroupCreate { + gc.mutation.AddItemIDs(ids...) + return gc +} + +// AddItems adds the "items" edges to the Item entity. +func (gc *GroupCreate) AddItems(i ...*Item) *GroupCreate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return gc.AddItemIDs(ids...) +} + +// AddLabelIDs adds the "labels" edge to the Label entity by IDs. +func (gc *GroupCreate) AddLabelIDs(ids ...uuid.UUID) *GroupCreate { + gc.mutation.AddLabelIDs(ids...) + return gc +} + +// AddLabels adds the "labels" edges to the Label entity. +func (gc *GroupCreate) AddLabels(l ...*Label) *GroupCreate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return gc.AddLabelIDs(ids...) 
+} + +// Mutation returns the GroupMutation object of the builder. +func (gc *GroupCreate) Mutation() *GroupMutation { + return gc.mutation +} + +// Save creates the Group in the database. +func (gc *GroupCreate) Save(ctx context.Context) (*Group, error) { + var ( + err error + node *Group + ) + gc.defaults() + if len(gc.hooks) == 0 { + if err = gc.check(); err != nil { + return nil, err + } + node, err = gc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*GroupMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = gc.check(); err != nil { + return nil, err + } + gc.mutation = mutation + if node, err = gc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(gc.hooks) - 1; i >= 0; i-- { + if gc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = gc.hooks[i](mut) + } + v, err := mut.Mutate(ctx, gc.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Group) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from GroupMutation", v) + } + node = nv + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. +func (gc *GroupCreate) SaveX(ctx context.Context) *Group { + v, err := gc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (gc *GroupCreate) Exec(ctx context.Context) error { + _, err := gc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (gc *GroupCreate) ExecX(ctx context.Context) { + if err := gc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. 
+func (gc *GroupCreate) defaults() { + if _, ok := gc.mutation.CreatedAt(); !ok { + v := group.DefaultCreatedAt() + gc.mutation.SetCreatedAt(v) + } + if _, ok := gc.mutation.UpdatedAt(); !ok { + v := group.DefaultUpdatedAt() + gc.mutation.SetUpdatedAt(v) + } + if _, ok := gc.mutation.Currency(); !ok { + v := group.DefaultCurrency + gc.mutation.SetCurrency(v) + } + if _, ok := gc.mutation.ID(); !ok { + v := group.DefaultID() + gc.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (gc *GroupCreate) check() error { + if _, ok := gc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Group.created_at"`)} + } + if _, ok := gc.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Group.updated_at"`)} + } + if _, ok := gc.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "Group.name"`)} + } + if v, ok := gc.mutation.Name(); ok { + if err := group.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Group.name": %w`, err)} + } + } + if _, ok := gc.mutation.Currency(); !ok { + return &ValidationError{Name: "currency", err: errors.New(`ent: missing required field "Group.currency"`)} + } + if v, ok := gc.mutation.Currency(); ok { + if err := group.CurrencyValidator(v); err != nil { + return &ValidationError{Name: "currency", err: fmt.Errorf(`ent: validator failed for field "Group.currency": %w`, err)} + } + } + return nil +} + +func (gc *GroupCreate) sqlSave(ctx context.Context) (*Group, error) { + _node, _spec := gc.createSpec() + if err := sqlgraph.CreateNode(ctx, gc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := 
_spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (gc *GroupCreate) createSpec() (*Group, *sqlgraph.CreateSpec) { + var ( + _node = &Group{config: gc.config} + _spec = &sqlgraph.CreateSpec{ + Table: group.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + } + ) + if id, ok := gc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := gc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: group.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := gc.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: group.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } + if value, ok := gc.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: group.FieldName, + }) + _node.Name = value + } + if value, ok := gc.mutation.Currency(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeEnum, + Value: value, + Column: group.FieldCurrency, + }) + _node.Currency = value + } + if nodes := gc.mutation.UsersIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := gc.mutation.LocationsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LocationsTable, + Columns: 
[]string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := gc.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := gc.mutation.LabelsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// GroupCreateBulk is the builder for creating many Group entities in bulk. +type GroupCreateBulk struct { + config + builders []*GroupCreate +} + +// Save creates the Group entities in the database. 
+func (gcb *GroupCreateBulk) Save(ctx context.Context) ([]*Group, error) { + specs := make([]*sqlgraph.CreateSpec, len(gcb.builders)) + nodes := make([]*Group, len(gcb.builders)) + mutators := make([]Mutator, len(gcb.builders)) + for i := range gcb.builders { + func(i int, root context.Context) { + builder := gcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*GroupMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, gcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. + if err = sqlgraph.BatchCreate(ctx, gcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, gcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (gcb *GroupCreateBulk) SaveX(ctx context.Context) []*Group { + v, err := gcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (gcb *GroupCreateBulk) Exec(ctx context.Context) error { + _, err := gcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (gcb *GroupCreateBulk) ExecX(ctx context.Context) { + if err := gcb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/group_delete.go b/backend/ent/group_delete.go new file mode 100644 index 0000000..a15128f --- /dev/null +++ b/backend/ent/group_delete.go @@ -0,0 +1,115 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// GroupDelete is the builder for deleting a Group entity. +type GroupDelete struct { + config + hooks []Hook + mutation *GroupMutation +} + +// Where appends a list predicates to the GroupDelete builder. +func (gd *GroupDelete) Where(ps ...predicate.Group) *GroupDelete { + gd.mutation.Where(ps...) + return gd +} + +// Exec executes the deletion query and returns how many vertices were deleted. +func (gd *GroupDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(gd.hooks) == 0 { + affected, err = gd.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*GroupMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + gd.mutation = mutation + affected, err = gd.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(gd.hooks) - 1; i >= 0; i-- { + if gd.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = gd.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, gd.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (gd *GroupDelete) ExecX(ctx context.Context) int { + n, err := gd.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (gd *GroupDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: group.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + if ps := gd.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, gd.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return affected, err +} + +// GroupDeleteOne is the builder for deleting a single Group entity. +type GroupDeleteOne struct { + gd *GroupDelete +} + +// Exec executes the deletion query. +func (gdo *GroupDeleteOne) Exec(ctx context.Context) error { + n, err := gdo.gd.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{group.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (gdo *GroupDeleteOne) ExecX(ctx context.Context) { + gdo.gd.ExecX(ctx) +} diff --git a/backend/ent/group_query.go b/backend/ent/group_query.go new file mode 100644 index 0000000..8206f68 --- /dev/null +++ b/backend/ent/group_query.go @@ -0,0 +1,832 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "database/sql/driver" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" +) + +// GroupQuery is the builder for querying Group entities. +type GroupQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.Group + withUsers *UserQuery + withLocations *LocationQuery + withItems *ItemQuery + withLabels *LabelQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the GroupQuery builder. +func (gq *GroupQuery) Where(ps ...predicate.Group) *GroupQuery { + gq.predicates = append(gq.predicates, ps...) + return gq +} + +// Limit adds a limit step to the query. +func (gq *GroupQuery) Limit(limit int) *GroupQuery { + gq.limit = &limit + return gq +} + +// Offset adds an offset step to the query. +func (gq *GroupQuery) Offset(offset int) *GroupQuery { + gq.offset = &offset + return gq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (gq *GroupQuery) Unique(unique bool) *GroupQuery { + gq.unique = &unique + return gq +} + +// Order adds an order step to the query. +func (gq *GroupQuery) Order(o ...OrderFunc) *GroupQuery { + gq.order = append(gq.order, o...) + return gq +} + +// QueryUsers chains the current query on the "users" edge. 
+func (gq *GroupQuery) QueryUsers() *UserQuery { + query := &UserQuery{config: gq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := gq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := gq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, selector), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.UsersTable, group.UsersColumn), + ) + fromU = sqlgraph.SetNeighbors(gq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryLocations chains the current query on the "locations" edge. +func (gq *GroupQuery) QueryLocations() *LocationQuery { + query := &LocationQuery{config: gq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := gq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := gq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, selector), + sqlgraph.To(location.Table, location.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.LocationsTable, group.LocationsColumn), + ) + fromU = sqlgraph.SetNeighbors(gq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryItems chains the current query on the "items" edge. 
+func (gq *GroupQuery) QueryItems() *ItemQuery { + query := &ItemQuery{config: gq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := gq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := gq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, selector), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.ItemsTable, group.ItemsColumn), + ) + fromU = sqlgraph.SetNeighbors(gq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryLabels chains the current query on the "labels" edge. +func (gq *GroupQuery) QueryLabels() *LabelQuery { + query := &LabelQuery{config: gq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := gq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := gq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(group.Table, group.FieldID, selector), + sqlgraph.To(label.Table, label.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, group.LabelsTable, group.LabelsColumn), + ) + fromU = sqlgraph.SetNeighbors(gq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first Group entity from the query. +// Returns a *NotFoundError when no Group was found. +func (gq *GroupQuery) First(ctx context.Context) (*Group, error) { + nodes, err := gq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{group.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (gq *GroupQuery) FirstX(ctx context.Context) *Group { + node, err := gq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first Group ID from the query. 
+// Returns a *NotFoundError when no Group ID was found. +func (gq *GroupQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = gq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{group.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (gq *GroupQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := gq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single Group entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one Group entity is found. +// Returns a *NotFoundError when no Group entities are found. +func (gq *GroupQuery) Only(ctx context.Context) (*Group, error) { + nodes, err := gq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{group.Label} + default: + return nil, &NotSingularError{group.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (gq *GroupQuery) OnlyX(ctx context.Context) *Group { + node, err := gq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only Group ID in the query. +// Returns a *NotSingularError when more than one Group ID is found. +// Returns a *NotFoundError when no entities are found. +func (gq *GroupQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = gq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{group.Label} + default: + err = &NotSingularError{group.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. 
+func (gq *GroupQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := gq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of Groups. +func (gq *GroupQuery) All(ctx context.Context) ([]*Group, error) { + if err := gq.prepareQuery(ctx); err != nil { + return nil, err + } + return gq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (gq *GroupQuery) AllX(ctx context.Context) []*Group { + nodes, err := gq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of Group IDs. +func (gq *GroupQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := gq.Select(group.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (gq *GroupQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := gq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (gq *GroupQuery) Count(ctx context.Context) (int, error) { + if err := gq.prepareQuery(ctx); err != nil { + return 0, err + } + return gq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (gq *GroupQuery) CountX(ctx context.Context) int { + count, err := gq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (gq *GroupQuery) Exist(ctx context.Context) (bool, error) { + if err := gq.prepareQuery(ctx); err != nil { + return false, err + } + return gq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. +func (gq *GroupQuery) ExistX(ctx context.Context) bool { + exist, err := gq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the GroupQuery builder, including all associated steps. 
It can be +// used to prepare common query builders and use them differently after the clone is made. +func (gq *GroupQuery) Clone() *GroupQuery { + if gq == nil { + return nil + } + return &GroupQuery{ + config: gq.config, + limit: gq.limit, + offset: gq.offset, + order: append([]OrderFunc{}, gq.order...), + predicates: append([]predicate.Group{}, gq.predicates...), + withUsers: gq.withUsers.Clone(), + withLocations: gq.withLocations.Clone(), + withItems: gq.withItems.Clone(), + withLabels: gq.withLabels.Clone(), + // clone intermediate query. + sql: gq.sql.Clone(), + path: gq.path, + unique: gq.unique, + } +} + +// WithUsers tells the query-builder to eager-load the nodes that are connected to +// the "users" edge. The optional arguments are used to configure the query builder of the edge. +func (gq *GroupQuery) WithUsers(opts ...func(*UserQuery)) *GroupQuery { + query := &UserQuery{config: gq.config} + for _, opt := range opts { + opt(query) + } + gq.withUsers = query + return gq +} + +// WithLocations tells the query-builder to eager-load the nodes that are connected to +// the "locations" edge. The optional arguments are used to configure the query builder of the edge. +func (gq *GroupQuery) WithLocations(opts ...func(*LocationQuery)) *GroupQuery { + query := &LocationQuery{config: gq.config} + for _, opt := range opts { + opt(query) + } + gq.withLocations = query + return gq +} + +// WithItems tells the query-builder to eager-load the nodes that are connected to +// the "items" edge. The optional arguments are used to configure the query builder of the edge. +func (gq *GroupQuery) WithItems(opts ...func(*ItemQuery)) *GroupQuery { + query := &ItemQuery{config: gq.config} + for _, opt := range opts { + opt(query) + } + gq.withItems = query + return gq +} + +// WithLabels tells the query-builder to eager-load the nodes that are connected to +// the "labels" edge. The optional arguments are used to configure the query builder of the edge. 
+func (gq *GroupQuery) WithLabels(opts ...func(*LabelQuery)) *GroupQuery { + query := &LabelQuery{config: gq.config} + for _, opt := range opts { + opt(query) + } + gq.withLabels = query + return gq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.Group.Query(). +// GroupBy(group.FieldCreatedAt). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (gq *GroupQuery) GroupBy(field string, fields ...string) *GroupGroupBy { + grbuild := &GroupGroupBy{config: gq.config} + grbuild.fields = append([]string{field}, fields...) + grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := gq.prepareQuery(ctx); err != nil { + return nil, err + } + return gq.sqlQuery(ctx), nil + } + grbuild.label = group.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.Group.Query(). +// Select(group.FieldCreatedAt). +// Scan(ctx, &v) +func (gq *GroupQuery) Select(fields ...string) *GroupSelect { + gq.fields = append(gq.fields, fields...) 
+ selbuild := &GroupSelect{GroupQuery: gq} + selbuild.label = group.Label + selbuild.flds, selbuild.scan = &gq.fields, selbuild.Scan + return selbuild +} + +func (gq *GroupQuery) prepareQuery(ctx context.Context) error { + for _, f := range gq.fields { + if !group.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if gq.path != nil { + prev, err := gq.path(ctx) + if err != nil { + return err + } + gq.sql = prev + } + return nil +} + +func (gq *GroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Group, error) { + var ( + nodes = []*Group{} + _spec = gq.querySpec() + loadedTypes = [4]bool{ + gq.withUsers != nil, + gq.withLocations != nil, + gq.withItems != nil, + gq.withLabels != nil, + } + ) + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + return (*Group).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + node := &Group{config: gq.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, gq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := gq.withUsers; query != nil { + if err := gq.loadUsers(ctx, query, nodes, + func(n *Group) { n.Edges.Users = []*User{} }, + func(n *Group, e *User) { n.Edges.Users = append(n.Edges.Users, e) }); err != nil { + return nil, err + } + } + if query := gq.withLocations; query != nil { + if err := gq.loadLocations(ctx, query, nodes, + func(n *Group) { n.Edges.Locations = []*Location{} }, + func(n *Group, e *Location) { n.Edges.Locations = append(n.Edges.Locations, e) }); err != nil { + return nil, err + } + } + if query := gq.withItems; query != nil { + if err := gq.loadItems(ctx, query, nodes, + func(n *Group) { n.Edges.Items = []*Item{} }, + func(n *Group, e *Item) { 
n.Edges.Items = append(n.Edges.Items, e) }); err != nil { + return nil, err + } + } + if query := gq.withLabels; query != nil { + if err := gq.loadLabels(ctx, query, nodes, + func(n *Group) { n.Edges.Labels = []*Label{} }, + func(n *Group, e *Label) { n.Edges.Labels = append(n.Edges.Labels, e) }); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (gq *GroupQuery) loadUsers(ctx context.Context, query *UserQuery, nodes []*Group, init func(*Group), assign func(*Group, *User)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*Group) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.User(func(s *sql.Selector) { + s.Where(sql.InValues(group.UsersColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.group_users + if fk == nil { + return fmt.Errorf(`foreign-key "group_users" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_users" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} +func (gq *GroupQuery) loadLocations(ctx context.Context, query *LocationQuery, nodes []*Group, init func(*Group), assign func(*Group, *Location)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*Group) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.Location(func(s *sql.Selector) { + s.Where(sql.InValues(group.LocationsColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.group_locations + if fk == nil { + return fmt.Errorf(`foreign-key "group_locations" is nil for node %v`, n.ID) + } + 
node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_locations" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} +func (gq *GroupQuery) loadItems(ctx context.Context, query *ItemQuery, nodes []*Group, init func(*Group), assign func(*Group, *Item)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*Group) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.Item(func(s *sql.Selector) { + s.Where(sql.InValues(group.ItemsColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.group_items + if fk == nil { + return fmt.Errorf(`foreign-key "group_items" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_items" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} +func (gq *GroupQuery) loadLabels(ctx context.Context, query *LabelQuery, nodes []*Group, init func(*Group), assign func(*Group, *Label)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*Group) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.Label(func(s *sql.Selector) { + s.Where(sql.InValues(group.LabelsColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.group_labels + if fk == nil { + return fmt.Errorf(`foreign-key "group_labels" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_labels" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} + +func (gq *GroupQuery) 
sqlCount(ctx context.Context) (int, error) { + _spec := gq.querySpec() + _spec.Node.Columns = gq.fields + if len(gq.fields) > 0 { + _spec.Unique = gq.unique != nil && *gq.unique + } + return sqlgraph.CountNodes(ctx, gq.driver, _spec) +} + +func (gq *GroupQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := gq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (gq *GroupQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: group.Table, + Columns: group.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + From: gq.sql, + Unique: true, + } + if unique := gq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := gq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, group.FieldID) + for i := range fields { + if fields[i] != group.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := gq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := gq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := gq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := gq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (gq *GroupQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(gq.driver.Dialect()) + t1 := builder.Table(group.Table) + columns := gq.fields + if len(columns) == 0 { + columns = group.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if gq.sql != nil { + selector = gq.sql + selector.Select(selector.Columns(columns...)...) 
+ } + if gq.unique != nil && *gq.unique { + selector.Distinct() + } + for _, p := range gq.predicates { + p(selector) + } + for _, p := range gq.order { + p(selector) + } + if offset := gq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := gq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// GroupGroupBy is the group-by builder for Group entities. +type GroupGroupBy struct { + config + selector + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (ggb *GroupGroupBy) Aggregate(fns ...AggregateFunc) *GroupGroupBy { + ggb.fns = append(ggb.fns, fns...) + return ggb +} + +// Scan applies the group-by query and scans the result into the given value. 
+func (ggb *GroupGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := ggb.path(ctx) + if err != nil { + return err + } + ggb.sql = query + return ggb.sqlScan(ctx, v) +} + +func (ggb *GroupGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range ggb.fields { + if !group.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := ggb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := ggb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (ggb *GroupGroupBy) sqlQuery() *sql.Selector { + selector := ggb.sql.Select() + aggregation := make([]string, 0, len(ggb.fns)) + for _, fn := range ggb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(ggb.fields)+len(ggb.fns)) + for _, f := range ggb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(ggb.fields...)...) +} + +// GroupSelect is the builder for selecting fields of Group entities. +type GroupSelect struct { + *GroupQuery + selector + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. 
+func (gs *GroupSelect) Scan(ctx context.Context, v interface{}) error { + if err := gs.prepareQuery(ctx); err != nil { + return err + } + gs.sql = gs.GroupQuery.sqlQuery(ctx) + return gs.sqlScan(ctx, v) +} + +func (gs *GroupSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := gs.sql.Query() + if err := gs.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/group_update.go b/backend/ent/group_update.go new file mode 100644 index 0000000..4fa63a8 --- /dev/null +++ b/backend/ent/group_update.go @@ -0,0 +1,1125 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" + "github.com/hay-kot/content/backend/ent/user" +) + +// GroupUpdate is the builder for updating Group entities. +type GroupUpdate struct { + config + hooks []Hook + mutation *GroupMutation +} + +// Where appends a list predicates to the GroupUpdate builder. +func (gu *GroupUpdate) Where(ps ...predicate.Group) *GroupUpdate { + gu.mutation.Where(ps...) + return gu +} + +// SetUpdatedAt sets the "updated_at" field. +func (gu *GroupUpdate) SetUpdatedAt(t time.Time) *GroupUpdate { + gu.mutation.SetUpdatedAt(t) + return gu +} + +// SetName sets the "name" field. +func (gu *GroupUpdate) SetName(s string) *GroupUpdate { + gu.mutation.SetName(s) + return gu +} + +// SetCurrency sets the "currency" field. 
+func (gu *GroupUpdate) SetCurrency(gr group.Currency) *GroupUpdate { + gu.mutation.SetCurrency(gr) + return gu +} + +// SetNillableCurrency sets the "currency" field if the given value is not nil. +func (gu *GroupUpdate) SetNillableCurrency(gr *group.Currency) *GroupUpdate { + if gr != nil { + gu.SetCurrency(*gr) + } + return gu +} + +// AddUserIDs adds the "users" edge to the User entity by IDs. +func (gu *GroupUpdate) AddUserIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.AddUserIDs(ids...) + return gu +} + +// AddUsers adds the "users" edges to the User entity. +func (gu *GroupUpdate) AddUsers(u ...*User) *GroupUpdate { + ids := make([]uuid.UUID, len(u)) + for i := range u { + ids[i] = u[i].ID + } + return gu.AddUserIDs(ids...) +} + +// AddLocationIDs adds the "locations" edge to the Location entity by IDs. +func (gu *GroupUpdate) AddLocationIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.AddLocationIDs(ids...) + return gu +} + +// AddLocations adds the "locations" edges to the Location entity. +func (gu *GroupUpdate) AddLocations(l ...*Location) *GroupUpdate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return gu.AddLocationIDs(ids...) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (gu *GroupUpdate) AddItemIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.AddItemIDs(ids...) + return gu +} + +// AddItems adds the "items" edges to the Item entity. +func (gu *GroupUpdate) AddItems(i ...*Item) *GroupUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return gu.AddItemIDs(ids...) +} + +// AddLabelIDs adds the "labels" edge to the Label entity by IDs. +func (gu *GroupUpdate) AddLabelIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.AddLabelIDs(ids...) + return gu +} + +// AddLabels adds the "labels" edges to the Label entity. 
+func (gu *GroupUpdate) AddLabels(l ...*Label) *GroupUpdate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return gu.AddLabelIDs(ids...) +} + +// Mutation returns the GroupMutation object of the builder. +func (gu *GroupUpdate) Mutation() *GroupMutation { + return gu.mutation +} + +// ClearUsers clears all "users" edges to the User entity. +func (gu *GroupUpdate) ClearUsers() *GroupUpdate { + gu.mutation.ClearUsers() + return gu +} + +// RemoveUserIDs removes the "users" edge to User entities by IDs. +func (gu *GroupUpdate) RemoveUserIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.RemoveUserIDs(ids...) + return gu +} + +// RemoveUsers removes "users" edges to User entities. +func (gu *GroupUpdate) RemoveUsers(u ...*User) *GroupUpdate { + ids := make([]uuid.UUID, len(u)) + for i := range u { + ids[i] = u[i].ID + } + return gu.RemoveUserIDs(ids...) +} + +// ClearLocations clears all "locations" edges to the Location entity. +func (gu *GroupUpdate) ClearLocations() *GroupUpdate { + gu.mutation.ClearLocations() + return gu +} + +// RemoveLocationIDs removes the "locations" edge to Location entities by IDs. +func (gu *GroupUpdate) RemoveLocationIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.RemoveLocationIDs(ids...) + return gu +} + +// RemoveLocations removes "locations" edges to Location entities. +func (gu *GroupUpdate) RemoveLocations(l ...*Location) *GroupUpdate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return gu.RemoveLocationIDs(ids...) +} + +// ClearItems clears all "items" edges to the Item entity. +func (gu *GroupUpdate) ClearItems() *GroupUpdate { + gu.mutation.ClearItems() + return gu +} + +// RemoveItemIDs removes the "items" edge to Item entities by IDs. +func (gu *GroupUpdate) RemoveItemIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.RemoveItemIDs(ids...) + return gu +} + +// RemoveItems removes "items" edges to Item entities. 
+func (gu *GroupUpdate) RemoveItems(i ...*Item) *GroupUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return gu.RemoveItemIDs(ids...) +} + +// ClearLabels clears all "labels" edges to the Label entity. +func (gu *GroupUpdate) ClearLabels() *GroupUpdate { + gu.mutation.ClearLabels() + return gu +} + +// RemoveLabelIDs removes the "labels" edge to Label entities by IDs. +func (gu *GroupUpdate) RemoveLabelIDs(ids ...uuid.UUID) *GroupUpdate { + gu.mutation.RemoveLabelIDs(ids...) + return gu +} + +// RemoveLabels removes "labels" edges to Label entities. +func (gu *GroupUpdate) RemoveLabels(l ...*Label) *GroupUpdate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return gu.RemoveLabelIDs(ids...) +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (gu *GroupUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + gu.defaults() + if len(gu.hooks) == 0 { + if err = gu.check(); err != nil { + return 0, err + } + affected, err = gu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*GroupMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = gu.check(); err != nil { + return 0, err + } + gu.mutation = mutation + affected, err = gu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(gu.hooks) - 1; i >= 0; i-- { + if gu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = gu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, gu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. 
+func (gu *GroupUpdate) SaveX(ctx context.Context) int { + affected, err := gu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (gu *GroupUpdate) Exec(ctx context.Context) error { + _, err := gu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (gu *GroupUpdate) ExecX(ctx context.Context) { + if err := gu.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (gu *GroupUpdate) defaults() { + if _, ok := gu.mutation.UpdatedAt(); !ok { + v := group.UpdateDefaultUpdatedAt() + gu.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (gu *GroupUpdate) check() error { + if v, ok := gu.mutation.Name(); ok { + if err := group.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Group.name": %w`, err)} + } + } + if v, ok := gu.mutation.Currency(); ok { + if err := group.CurrencyValidator(v); err != nil { + return &ValidationError{Name: "currency", err: fmt.Errorf(`ent: validator failed for field "Group.currency": %w`, err)} + } + } + return nil +} + +func (gu *GroupUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: group.Table, + Columns: group.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + if ps := gu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := gu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: group.FieldUpdatedAt, + }) + } + if value, ok := gu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: 
value, + Column: group.FieldName, + }) + } + if value, ok := gu.mutation.Currency(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeEnum, + Value: value, + Column: group.FieldCurrency, + }) + } + if gu.mutation.UsersCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.RemovedUsersIDs(); len(nodes) > 0 && !gu.mutation.UsersCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.UsersIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if gu.mutation.LocationsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LocationsTable, + Columns: []string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := 
gu.mutation.RemovedLocationsIDs(); len(nodes) > 0 && !gu.mutation.LocationsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LocationsTable, + Columns: []string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.LocationsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LocationsTable, + Columns: []string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if gu.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.RemovedItemsIDs(); len(nodes) > 0 && !gu.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: 
false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if gu.mutation.LabelsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.RemovedLabelsIDs(); len(nodes) > 0 && !gu.mutation.LabelsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := gu.mutation.LabelsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, gu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{group.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + 
return n, nil +} + +// GroupUpdateOne is the builder for updating a single Group entity. +type GroupUpdateOne struct { + config + fields []string + hooks []Hook + mutation *GroupMutation +} + +// SetUpdatedAt sets the "updated_at" field. +func (guo *GroupUpdateOne) SetUpdatedAt(t time.Time) *GroupUpdateOne { + guo.mutation.SetUpdatedAt(t) + return guo +} + +// SetName sets the "name" field. +func (guo *GroupUpdateOne) SetName(s string) *GroupUpdateOne { + guo.mutation.SetName(s) + return guo +} + +// SetCurrency sets the "currency" field. +func (guo *GroupUpdateOne) SetCurrency(gr group.Currency) *GroupUpdateOne { + guo.mutation.SetCurrency(gr) + return guo +} + +// SetNillableCurrency sets the "currency" field if the given value is not nil. +func (guo *GroupUpdateOne) SetNillableCurrency(gr *group.Currency) *GroupUpdateOne { + if gr != nil { + guo.SetCurrency(*gr) + } + return guo +} + +// AddUserIDs adds the "users" edge to the User entity by IDs. +func (guo *GroupUpdateOne) AddUserIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.AddUserIDs(ids...) + return guo +} + +// AddUsers adds the "users" edges to the User entity. +func (guo *GroupUpdateOne) AddUsers(u ...*User) *GroupUpdateOne { + ids := make([]uuid.UUID, len(u)) + for i := range u { + ids[i] = u[i].ID + } + return guo.AddUserIDs(ids...) +} + +// AddLocationIDs adds the "locations" edge to the Location entity by IDs. +func (guo *GroupUpdateOne) AddLocationIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.AddLocationIDs(ids...) + return guo +} + +// AddLocations adds the "locations" edges to the Location entity. +func (guo *GroupUpdateOne) AddLocations(l ...*Location) *GroupUpdateOne { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return guo.AddLocationIDs(ids...) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (guo *GroupUpdateOne) AddItemIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.AddItemIDs(ids...) 
+ return guo +} + +// AddItems adds the "items" edges to the Item entity. +func (guo *GroupUpdateOne) AddItems(i ...*Item) *GroupUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return guo.AddItemIDs(ids...) +} + +// AddLabelIDs adds the "labels" edge to the Label entity by IDs. +func (guo *GroupUpdateOne) AddLabelIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.AddLabelIDs(ids...) + return guo +} + +// AddLabels adds the "labels" edges to the Label entity. +func (guo *GroupUpdateOne) AddLabels(l ...*Label) *GroupUpdateOne { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return guo.AddLabelIDs(ids...) +} + +// Mutation returns the GroupMutation object of the builder. +func (guo *GroupUpdateOne) Mutation() *GroupMutation { + return guo.mutation +} + +// ClearUsers clears all "users" edges to the User entity. +func (guo *GroupUpdateOne) ClearUsers() *GroupUpdateOne { + guo.mutation.ClearUsers() + return guo +} + +// RemoveUserIDs removes the "users" edge to User entities by IDs. +func (guo *GroupUpdateOne) RemoveUserIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.RemoveUserIDs(ids...) + return guo +} + +// RemoveUsers removes "users" edges to User entities. +func (guo *GroupUpdateOne) RemoveUsers(u ...*User) *GroupUpdateOne { + ids := make([]uuid.UUID, len(u)) + for i := range u { + ids[i] = u[i].ID + } + return guo.RemoveUserIDs(ids...) +} + +// ClearLocations clears all "locations" edges to the Location entity. +func (guo *GroupUpdateOne) ClearLocations() *GroupUpdateOne { + guo.mutation.ClearLocations() + return guo +} + +// RemoveLocationIDs removes the "locations" edge to Location entities by IDs. +func (guo *GroupUpdateOne) RemoveLocationIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.RemoveLocationIDs(ids...) + return guo +} + +// RemoveLocations removes "locations" edges to Location entities. 
+func (guo *GroupUpdateOne) RemoveLocations(l ...*Location) *GroupUpdateOne { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return guo.RemoveLocationIDs(ids...) +} + +// ClearItems clears all "items" edges to the Item entity. +func (guo *GroupUpdateOne) ClearItems() *GroupUpdateOne { + guo.mutation.ClearItems() + return guo +} + +// RemoveItemIDs removes the "items" edge to Item entities by IDs. +func (guo *GroupUpdateOne) RemoveItemIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.RemoveItemIDs(ids...) + return guo +} + +// RemoveItems removes "items" edges to Item entities. +func (guo *GroupUpdateOne) RemoveItems(i ...*Item) *GroupUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return guo.RemoveItemIDs(ids...) +} + +// ClearLabels clears all "labels" edges to the Label entity. +func (guo *GroupUpdateOne) ClearLabels() *GroupUpdateOne { + guo.mutation.ClearLabels() + return guo +} + +// RemoveLabelIDs removes the "labels" edge to Label entities by IDs. +func (guo *GroupUpdateOne) RemoveLabelIDs(ids ...uuid.UUID) *GroupUpdateOne { + guo.mutation.RemoveLabelIDs(ids...) + return guo +} + +// RemoveLabels removes "labels" edges to Label entities. +func (guo *GroupUpdateOne) RemoveLabels(l ...*Label) *GroupUpdateOne { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return guo.RemoveLabelIDs(ids...) +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (guo *GroupUpdateOne) Select(field string, fields ...string) *GroupUpdateOne { + guo.fields = append([]string{field}, fields...) + return guo +} + +// Save executes the query and returns the updated Group entity. 
+func (guo *GroupUpdateOne) Save(ctx context.Context) (*Group, error) { + var ( + err error + node *Group + ) + guo.defaults() + if len(guo.hooks) == 0 { + if err = guo.check(); err != nil { + return nil, err + } + node, err = guo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*GroupMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = guo.check(); err != nil { + return nil, err + } + guo.mutation = mutation + node, err = guo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(guo.hooks) - 1; i >= 0; i-- { + if guo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = guo.hooks[i](mut) + } + v, err := mut.Mutate(ctx, guo.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Group) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from GroupMutation", v) + } + node = nv + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. +func (guo *GroupUpdateOne) SaveX(ctx context.Context) *Group { + node, err := guo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (guo *GroupUpdateOne) Exec(ctx context.Context) error { + _, err := guo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (guo *GroupUpdateOne) ExecX(ctx context.Context) { + if err := guo.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (guo *GroupUpdateOne) defaults() { + if _, ok := guo.mutation.UpdatedAt(); !ok { + v := group.UpdateDefaultUpdatedAt() + guo.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (guo *GroupUpdateOne) check() error { + if v, ok := guo.mutation.Name(); ok { + if err := group.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Group.name": %w`, err)} + } + } + if v, ok := guo.mutation.Currency(); ok { + if err := group.CurrencyValidator(v); err != nil { + return &ValidationError{Name: "currency", err: fmt.Errorf(`ent: validator failed for field "Group.currency": %w`, err)} + } + } + return nil +} + +func (guo *GroupUpdateOne) sqlSave(ctx context.Context) (_node *Group, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: group.Table, + Columns: group.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + id, ok := guo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Group.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := guo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, group.FieldID) + for _, f := range fields { + if !group.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != group.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := guo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := guo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: group.FieldUpdatedAt, + }) + } + if value, ok := guo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: group.FieldName, + }) + } + if value, ok := guo.mutation.Currency(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, 
&sqlgraph.FieldSpec{ + Type: field.TypeEnum, + Value: value, + Column: group.FieldCurrency, + }) + } + if guo.mutation.UsersCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.RemovedUsersIDs(); len(nodes) > 0 && !guo.mutation.UsersCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.UsersIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.UsersTable, + Columns: []string{group.UsersColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if guo.mutation.LocationsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LocationsTable, + Columns: []string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.RemovedLocationsIDs(); len(nodes) > 0 && !guo.mutation.LocationsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + 
Inverse: false, + Table: group.LocationsTable, + Columns: []string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.LocationsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LocationsTable, + Columns: []string{group.LocationsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if guo.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.RemovedItemsIDs(); len(nodes) > 0 && !guo.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.ItemsTable, + Columns: []string{group.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: 
&sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if guo.mutation.LabelsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.RemovedLabelsIDs(); len(nodes) > 0 && !guo.mutation.LabelsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := guo.mutation.LabelsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: group.LabelsTable, + Columns: []string{group.LabelsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &Group{config: guo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, guo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{group.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + return _node, nil +} 
diff --git a/backend/ent/hook/hook.go b/backend/ent/hook/hook.go index 362c3cf..6bffd21 100644 --- a/backend/ent/hook/hook.go +++ b/backend/ent/hook/hook.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package hook @@ -22,6 +22,71 @@ func (f AuthTokensFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, return f(ctx, mv) } +// The GroupFunc type is an adapter to allow the use of ordinary +// function as Group mutator. +type GroupFunc func(context.Context, *ent.GroupMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f GroupFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.GroupMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.GroupMutation", m) + } + return f(ctx, mv) +} + +// The ItemFunc type is an adapter to allow the use of ordinary +// function as Item mutator. +type ItemFunc func(context.Context, *ent.ItemMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f ItemFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.ItemMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.ItemMutation", m) + } + return f(ctx, mv) +} + +// The ItemFieldFunc type is an adapter to allow the use of ordinary +// function as ItemField mutator. +type ItemFieldFunc func(context.Context, *ent.ItemFieldMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f ItemFieldFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.ItemFieldMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.ItemFieldMutation", m) + } + return f(ctx, mv) +} + +// The LabelFunc type is an adapter to allow the use of ordinary +// function as Label mutator. +type LabelFunc func(context.Context, *ent.LabelMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). 
+func (f LabelFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.LabelMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.LabelMutation", m) + } + return f(ctx, mv) +} + +// The LocationFunc type is an adapter to allow the use of ordinary +// function as Location mutator. +type LocationFunc func(context.Context, *ent.LocationMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f LocationFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.LocationMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.LocationMutation", m) + } + return f(ctx, mv) +} + // The UserFunc type is an adapter to allow the use of ordinary // function as User mutator. type UserFunc func(context.Context, *ent.UserMutation) (ent.Value, error) @@ -130,7 +195,6 @@ func HasFields(field string, fields ...string) Condition { // If executes the given hook under condition. // // hook.If(ComputeAverage, And(HasFields(...), HasAddedFields(...))) -// func If(hk ent.Hook, cond Condition) ent.Hook { return func(next ent.Mutator) ent.Mutator { return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) { @@ -145,7 +209,6 @@ func If(hk ent.Hook, cond Condition) ent.Hook { // On executes the given hook only for the given operation. // // hook.On(Log, ent.Delete|ent.Create) -// func On(hk ent.Hook, op ent.Op) ent.Hook { return If(hk, HasOp(op)) } @@ -153,7 +216,6 @@ func On(hk ent.Hook, op ent.Op) ent.Hook { // Unless skips the given hook only for the given operation. 
// // hook.Unless(Log, ent.Update|ent.UpdateOne) -// func Unless(hk ent.Hook, op ent.Op) ent.Hook { return If(hk, Not(HasOp(op))) } @@ -174,7 +236,6 @@ func FixedError(err error) ent.Hook { // Reject(ent.Delete|ent.Update), // } // } -// func Reject(op ent.Op) ent.Hook { hk := FixedError(fmt.Errorf("%s operation is not allowed", op)) return On(hk, op) diff --git a/backend/ent/item.go b/backend/ent/item.go new file mode 100644 index 0000000..6c88f6b --- /dev/null +++ b/backend/ent/item.go @@ -0,0 +1,385 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/location" +) + +// Item is the model entity for the Item schema. +type Item struct { + config `json:"-"` + // ID of the ent. + ID uuid.UUID `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` + // Name holds the value of the "name" field. + Name string `json:"name,omitempty"` + // Description holds the value of the "description" field. + Description string `json:"description,omitempty"` + // Notes holds the value of the "notes" field. + Notes string `json:"notes,omitempty"` + // SerialNumber holds the value of the "serial_number" field. + SerialNumber string `json:"serial_number,omitempty"` + // ModelNumber holds the value of the "model_number" field. + ModelNumber string `json:"model_number,omitempty"` + // Manufacturer holds the value of the "manufacturer" field. + Manufacturer string `json:"manufacturer,omitempty"` + // PurchaseTime holds the value of the "purchase_time" field. 
+ PurchaseTime time.Time `json:"purchase_time,omitempty"` + // PurchaseFrom holds the value of the "purchase_from" field. + PurchaseFrom string `json:"purchase_from,omitempty"` + // PurchasePrice holds the value of the "purchase_price" field. + PurchasePrice float64 `json:"purchase_price,omitempty"` + // PurchaseReceiptID holds the value of the "purchase_receipt_id" field. + PurchaseReceiptID uuid.UUID `json:"purchase_receipt_id,omitempty"` + // SoldTime holds the value of the "sold_time" field. + SoldTime time.Time `json:"sold_time,omitempty"` + // SoldTo holds the value of the "sold_to" field. + SoldTo string `json:"sold_to,omitempty"` + // SoldPrice holds the value of the "sold_price" field. + SoldPrice float64 `json:"sold_price,omitempty"` + // SoldReceiptID holds the value of the "sold_receipt_id" field. + SoldReceiptID uuid.UUID `json:"sold_receipt_id,omitempty"` + // SoldNotes holds the value of the "sold_notes" field. + SoldNotes string `json:"sold_notes,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the ItemQuery when eager-loading is set. + Edges ItemEdges `json:"edges"` + group_items *uuid.UUID + location_items *uuid.UUID +} + +// ItemEdges holds the relations/edges for other nodes in the graph. +type ItemEdges struct { + // Group holds the value of the group edge. + Group *Group `json:"group,omitempty"` + // Location holds the value of the location edge. + Location *Location `json:"location,omitempty"` + // Fields holds the value of the fields edge. + Fields []*ItemField `json:"fields,omitempty"` + // Label holds the value of the label edge. + Label []*Label `json:"label,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [4]bool +} + +// GroupOrErr returns the Group value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. 
+func (e ItemEdges) GroupOrErr() (*Group, error) { + if e.loadedTypes[0] { + if e.Group == nil { + // Edge was loaded but was not found. + return nil, &NotFoundError{label: group.Label} + } + return e.Group, nil + } + return nil, &NotLoadedError{edge: "group"} +} + +// LocationOrErr returns the Location value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e ItemEdges) LocationOrErr() (*Location, error) { + if e.loadedTypes[1] { + if e.Location == nil { + // Edge was loaded but was not found. + return nil, &NotFoundError{label: location.Label} + } + return e.Location, nil + } + return nil, &NotLoadedError{edge: "location"} +} + +// FieldsOrErr returns the Fields value or an error if the edge +// was not loaded in eager-loading. +func (e ItemEdges) FieldsOrErr() ([]*ItemField, error) { + if e.loadedTypes[2] { + return e.Fields, nil + } + return nil, &NotLoadedError{edge: "fields"} +} + +// LabelOrErr returns the Label value or an error if the edge +// was not loaded in eager-loading. +func (e ItemEdges) LabelOrErr() ([]*Label, error) { + if e.loadedTypes[3] { + return e.Label, nil + } + return nil, &NotLoadedError{edge: "label"} +} + +// scanValues returns the types for scanning values from sql.Rows. 
+func (*Item) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case item.FieldPurchasePrice, item.FieldSoldPrice: + values[i] = new(sql.NullFloat64) + case item.FieldName, item.FieldDescription, item.FieldNotes, item.FieldSerialNumber, item.FieldModelNumber, item.FieldManufacturer, item.FieldPurchaseFrom, item.FieldSoldTo, item.FieldSoldNotes: + values[i] = new(sql.NullString) + case item.FieldCreatedAt, item.FieldUpdatedAt, item.FieldPurchaseTime, item.FieldSoldTime: + values[i] = new(sql.NullTime) + case item.FieldID, item.FieldPurchaseReceiptID, item.FieldSoldReceiptID: + values[i] = new(uuid.UUID) + case item.ForeignKeys[0]: // group_items + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + case item.ForeignKeys[1]: // location_items + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + default: + return nil, fmt.Errorf("unexpected column %q for type Item", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the Item fields. 
+func (i *Item) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for j := range columns { + switch columns[j] { + case item.FieldID: + if value, ok := values[j].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field id", values[j]) + } else if value != nil { + i.ID = *value + } + case item.FieldCreatedAt: + if value, ok := values[j].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[j]) + } else if value.Valid { + i.CreatedAt = value.Time + } + case item.FieldUpdatedAt: + if value, ok := values[j].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[j]) + } else if value.Valid { + i.UpdatedAt = value.Time + } + case item.FieldName: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field name", values[j]) + } else if value.Valid { + i.Name = value.String + } + case item.FieldDescription: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field description", values[j]) + } else if value.Valid { + i.Description = value.String + } + case item.FieldNotes: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field notes", values[j]) + } else if value.Valid { + i.Notes = value.String + } + case item.FieldSerialNumber: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field serial_number", values[j]) + } else if value.Valid { + i.SerialNumber = value.String + } + case item.FieldModelNumber: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field model_number", values[j]) + } else if value.Valid { + i.ModelNumber = value.String + } + case item.FieldManufacturer: + if value, ok := values[j].(*sql.NullString); !ok { + 
return fmt.Errorf("unexpected type %T for field manufacturer", values[j]) + } else if value.Valid { + i.Manufacturer = value.String + } + case item.FieldPurchaseTime: + if value, ok := values[j].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field purchase_time", values[j]) + } else if value.Valid { + i.PurchaseTime = value.Time + } + case item.FieldPurchaseFrom: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field purchase_from", values[j]) + } else if value.Valid { + i.PurchaseFrom = value.String + } + case item.FieldPurchasePrice: + if value, ok := values[j].(*sql.NullFloat64); !ok { + return fmt.Errorf("unexpected type %T for field purchase_price", values[j]) + } else if value.Valid { + i.PurchasePrice = value.Float64 + } + case item.FieldPurchaseReceiptID: + if value, ok := values[j].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field purchase_receipt_id", values[j]) + } else if value != nil { + i.PurchaseReceiptID = *value + } + case item.FieldSoldTime: + if value, ok := values[j].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field sold_time", values[j]) + } else if value.Valid { + i.SoldTime = value.Time + } + case item.FieldSoldTo: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field sold_to", values[j]) + } else if value.Valid { + i.SoldTo = value.String + } + case item.FieldSoldPrice: + if value, ok := values[j].(*sql.NullFloat64); !ok { + return fmt.Errorf("unexpected type %T for field sold_price", values[j]) + } else if value.Valid { + i.SoldPrice = value.Float64 + } + case item.FieldSoldReceiptID: + if value, ok := values[j].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field sold_receipt_id", values[j]) + } else if value != nil { + i.SoldReceiptID = *value + } + case item.FieldSoldNotes: + if value, ok := values[j].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type 
%T for field sold_notes", values[j]) + } else if value.Valid { + i.SoldNotes = value.String + } + case item.ForeignKeys[0]: + if value, ok := values[j].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field group_items", values[j]) + } else if value.Valid { + i.group_items = new(uuid.UUID) + *i.group_items = *value.S.(*uuid.UUID) + } + case item.ForeignKeys[1]: + if value, ok := values[j].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field location_items", values[j]) + } else if value.Valid { + i.location_items = new(uuid.UUID) + *i.location_items = *value.S.(*uuid.UUID) + } + } + } + return nil +} + +// QueryGroup queries the "group" edge of the Item entity. +func (i *Item) QueryGroup() *GroupQuery { + return (&ItemClient{config: i.config}).QueryGroup(i) +} + +// QueryLocation queries the "location" edge of the Item entity. +func (i *Item) QueryLocation() *LocationQuery { + return (&ItemClient{config: i.config}).QueryLocation(i) +} + +// QueryFields queries the "fields" edge of the Item entity. +func (i *Item) QueryFields() *ItemFieldQuery { + return (&ItemClient{config: i.config}).QueryFields(i) +} + +// QueryLabel queries the "label" edge of the Item entity. +func (i *Item) QueryLabel() *LabelQuery { + return (&ItemClient{config: i.config}).QueryLabel(i) +} + +// Update returns a builder for updating this Item. +// Note that you need to call Item.Unwrap() before calling this method if this Item +// was returned from a transaction, and the transaction was committed or rolled back. +func (i *Item) Update() *ItemUpdateOne { + return (&ItemClient{config: i.config}).UpdateOne(i) +} + +// Unwrap unwraps the Item entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. 
+func (i *Item) Unwrap() *Item { + _tx, ok := i.config.driver.(*txDriver) + if !ok { + panic("ent: Item is not a transactional entity") + } + i.config.driver = _tx.drv + return i +} + +// String implements the fmt.Stringer. +func (i *Item) String() string { + var builder strings.Builder + builder.WriteString("Item(") + builder.WriteString(fmt.Sprintf("id=%v, ", i.ID)) + builder.WriteString("created_at=") + builder.WriteString(i.CreatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("updated_at=") + builder.WriteString(i.UpdatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("name=") + builder.WriteString(i.Name) + builder.WriteString(", ") + builder.WriteString("description=") + builder.WriteString(i.Description) + builder.WriteString(", ") + builder.WriteString("notes=") + builder.WriteString(i.Notes) + builder.WriteString(", ") + builder.WriteString("serial_number=") + builder.WriteString(i.SerialNumber) + builder.WriteString(", ") + builder.WriteString("model_number=") + builder.WriteString(i.ModelNumber) + builder.WriteString(", ") + builder.WriteString("manufacturer=") + builder.WriteString(i.Manufacturer) + builder.WriteString(", ") + builder.WriteString("purchase_time=") + builder.WriteString(i.PurchaseTime.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("purchase_from=") + builder.WriteString(i.PurchaseFrom) + builder.WriteString(", ") + builder.WriteString("purchase_price=") + builder.WriteString(fmt.Sprintf("%v", i.PurchasePrice)) + builder.WriteString(", ") + builder.WriteString("purchase_receipt_id=") + builder.WriteString(fmt.Sprintf("%v", i.PurchaseReceiptID)) + builder.WriteString(", ") + builder.WriteString("sold_time=") + builder.WriteString(i.SoldTime.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("sold_to=") + builder.WriteString(i.SoldTo) + builder.WriteString(", ") + builder.WriteString("sold_price=") + builder.WriteString(fmt.Sprintf("%v", 
i.SoldPrice)) + builder.WriteString(", ") + builder.WriteString("sold_receipt_id=") + builder.WriteString(fmt.Sprintf("%v", i.SoldReceiptID)) + builder.WriteString(", ") + builder.WriteString("sold_notes=") + builder.WriteString(i.SoldNotes) + builder.WriteByte(')') + return builder.String() +} + +// Items is a parsable slice of Item. +type Items []*Item + +func (i Items) config(cfg config) { + for _i := range i { + i[_i].config = cfg + } +} diff --git a/backend/ent/item/item.go b/backend/ent/item/item.go new file mode 100644 index 0000000..e43d6b3 --- /dev/null +++ b/backend/ent/item/item.go @@ -0,0 +1,165 @@ +// Code generated by ent, DO NOT EDIT. + +package item + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + // Label holds the string label denoting the item type in the database. + Label = "item" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // FieldUpdatedAt holds the string denoting the updated_at field in the database. + FieldUpdatedAt = "updated_at" + // FieldName holds the string denoting the name field in the database. + FieldName = "name" + // FieldDescription holds the string denoting the description field in the database. + FieldDescription = "description" + // FieldNotes holds the string denoting the notes field in the database. + FieldNotes = "notes" + // FieldSerialNumber holds the string denoting the serial_number field in the database. + FieldSerialNumber = "serial_number" + // FieldModelNumber holds the string denoting the model_number field in the database. + FieldModelNumber = "model_number" + // FieldManufacturer holds the string denoting the manufacturer field in the database. + FieldManufacturer = "manufacturer" + // FieldPurchaseTime holds the string denoting the purchase_time field in the database. 
+ FieldPurchaseTime = "purchase_time" + // FieldPurchaseFrom holds the string denoting the purchase_from field in the database. + FieldPurchaseFrom = "purchase_from" + // FieldPurchasePrice holds the string denoting the purchase_price field in the database. + FieldPurchasePrice = "purchase_price" + // FieldPurchaseReceiptID holds the string denoting the purchase_receipt_id field in the database. + FieldPurchaseReceiptID = "purchase_receipt_id" + // FieldSoldTime holds the string denoting the sold_time field in the database. + FieldSoldTime = "sold_time" + // FieldSoldTo holds the string denoting the sold_to field in the database. + FieldSoldTo = "sold_to" + // FieldSoldPrice holds the string denoting the sold_price field in the database. + FieldSoldPrice = "sold_price" + // FieldSoldReceiptID holds the string denoting the sold_receipt_id field in the database. + FieldSoldReceiptID = "sold_receipt_id" + // FieldSoldNotes holds the string denoting the sold_notes field in the database. + FieldSoldNotes = "sold_notes" + // EdgeGroup holds the string denoting the group edge name in mutations. + EdgeGroup = "group" + // EdgeLocation holds the string denoting the location edge name in mutations. + EdgeLocation = "location" + // EdgeFields holds the string denoting the fields edge name in mutations. + EdgeFields = "fields" + // EdgeLabel holds the string denoting the label edge name in mutations. + EdgeLabel = "label" + // Table holds the table name of the item in the database. + Table = "items" + // GroupTable is the table that holds the group relation/edge. + GroupTable = "items" + // GroupInverseTable is the table name for the Group entity. + // It exists in this package in order to avoid circular dependency with the "group" package. + GroupInverseTable = "groups" + // GroupColumn is the table column denoting the group relation/edge. + GroupColumn = "group_items" + // LocationTable is the table that holds the location relation/edge. 
+ LocationTable = "items" + // LocationInverseTable is the table name for the Location entity. + // It exists in this package in order to avoid circular dependency with the "location" package. + LocationInverseTable = "locations" + // LocationColumn is the table column denoting the location relation/edge. + LocationColumn = "location_items" + // FieldsTable is the table that holds the fields relation/edge. + FieldsTable = "item_fields" + // FieldsInverseTable is the table name for the ItemField entity. + // It exists in this package in order to avoid circular dependency with the "itemfield" package. + FieldsInverseTable = "item_fields" + // FieldsColumn is the table column denoting the fields relation/edge. + FieldsColumn = "item_fields" + // LabelTable is the table that holds the label relation/edge. The primary key declared below. + LabelTable = "label_items" + // LabelInverseTable is the table name for the Label entity. + // It exists in this package in order to avoid circular dependency with the "label" package. + LabelInverseTable = "labels" +) + +// Columns holds all SQL columns for item fields. +var Columns = []string{ + FieldID, + FieldCreatedAt, + FieldUpdatedAt, + FieldName, + FieldDescription, + FieldNotes, + FieldSerialNumber, + FieldModelNumber, + FieldManufacturer, + FieldPurchaseTime, + FieldPurchaseFrom, + FieldPurchasePrice, + FieldPurchaseReceiptID, + FieldSoldTime, + FieldSoldTo, + FieldSoldPrice, + FieldSoldReceiptID, + FieldSoldNotes, +} + +// ForeignKeys holds the SQL foreign-keys that are owned by the "items" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "group_items", + "location_items", +} + +var ( + // LabelPrimaryKey and LabelColumn2 are the table columns denoting the + // primary key for the label relation (M2M). + LabelPrimaryKey = []string{"label_id", "item_id"} +) + +// ValidColumn reports if the column name is valid (part of the table columns). 
+func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } + return false +} + +var ( + // DefaultCreatedAt holds the default value on creation for the "created_at" field. + DefaultCreatedAt func() time.Time + // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. + DefaultUpdatedAt func() time.Time + // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. + UpdateDefaultUpdatedAt func() time.Time + // NameValidator is a validator for the "name" field. It is called by the builders before save. + NameValidator func(string) error + // DescriptionValidator is a validator for the "description" field. It is called by the builders before save. + DescriptionValidator func(string) error + // NotesValidator is a validator for the "notes" field. It is called by the builders before save. + NotesValidator func(string) error + // SerialNumberValidator is a validator for the "serial_number" field. It is called by the builders before save. + SerialNumberValidator func(string) error + // ModelNumberValidator is a validator for the "model_number" field. It is called by the builders before save. + ModelNumberValidator func(string) error + // ManufacturerValidator is a validator for the "manufacturer" field. It is called by the builders before save. + ManufacturerValidator func(string) error + // DefaultPurchasePrice holds the default value on creation for the "purchase_price" field. + DefaultPurchasePrice float64 + // DefaultSoldPrice holds the default value on creation for the "sold_price" field. + DefaultSoldPrice float64 + // SoldNotesValidator is a validator for the "sold_notes" field. It is called by the builders before save. + SoldNotesValidator func(string) error + // DefaultID holds the default value on creation for the "id" field. 
+ DefaultID func() uuid.UUID +) diff --git a/backend/ent/item/where.go b/backend/ent/item/where.go new file mode 100644 index 0000000..a15a556 --- /dev/null +++ b/backend/ent/item/where.go @@ -0,0 +1,1917 @@ +// Code generated by ent, DO NOT EDIT. + +package item + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. 
+func IDLT(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// Name applies equality check predicate on the "name" field. It's identical to NameEQ. +func Name(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ. +func Description(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldDescription), v)) + }) +} + +// Notes applies equality check predicate on the "notes" field. It's identical to NotesEQ. +func Notes(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldNotes), v)) + }) +} + +// SerialNumber applies equality check predicate on the "serial_number" field. It's identical to SerialNumberEQ. +func SerialNumber(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSerialNumber), v)) + }) +} + +// ModelNumber applies equality check predicate on the "model_number" field. It's identical to ModelNumberEQ. 
+func ModelNumber(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldModelNumber), v)) + }) +} + +// Manufacturer applies equality check predicate on the "manufacturer" field. It's identical to ManufacturerEQ. +func Manufacturer(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldManufacturer), v)) + }) +} + +// PurchaseTime applies equality check predicate on the "purchase_time" field. It's identical to PurchaseTimeEQ. +func PurchaseTime(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseFrom applies equality check predicate on the "purchase_from" field. It's identical to PurchaseFromEQ. +func PurchaseFrom(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchasePrice applies equality check predicate on the "purchase_price" field. It's identical to PurchasePriceEQ. +func PurchasePrice(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchaseReceiptID applies equality check predicate on the "purchase_receipt_id" field. It's identical to PurchaseReceiptIDEQ. +func PurchaseReceiptID(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// SoldTime applies equality check predicate on the "sold_time" field. It's identical to SoldTimeEQ. +func SoldTime(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldTime), v)) + }) +} + +// SoldTo applies equality check predicate on the "sold_to" field. It's identical to SoldToEQ. 
+func SoldTo(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldTo), v)) + }) +} + +// SoldPrice applies equality check predicate on the "sold_price" field. It's identical to SoldPriceEQ. +func SoldPrice(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldPrice), v)) + }) +} + +// SoldReceiptID applies equality check predicate on the "sold_receipt_id" field. It's identical to SoldReceiptIDEQ. +func SoldReceiptID(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldNotes applies equality check predicate on the "sold_notes" field. It's identical to SoldNotesEQ. +func SoldNotes(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldNotes), v)) + }) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. 
+func CreatedAtNotIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. +func UpdatedAtEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. +func UpdatedAtIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. 
+func UpdatedAtNotIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. +func UpdatedAtGT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. +func UpdatedAtGTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. +func UpdatedAtLT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldUpdatedAt), v)) + }) +} + +// NameEQ applies the EQ predicate on the "name" field. +func NameEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// NameNEQ applies the NEQ predicate on the "name" field. +func NameNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldName), v)) + }) +} + +// NameIn applies the In predicate on the "name" field. +func NameIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldName), v...)) + }) +} + +// NameNotIn applies the NotIn predicate on the "name" field. 
+func NameNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldName), v...)) + }) +} + +// NameGT applies the GT predicate on the "name" field. +func NameGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldName), v)) + }) +} + +// NameGTE applies the GTE predicate on the "name" field. +func NameGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldName), v)) + }) +} + +// NameLT applies the LT predicate on the "name" field. +func NameLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldName), v)) + }) +} + +// NameLTE applies the LTE predicate on the "name" field. +func NameLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldName), v)) + }) +} + +// NameContains applies the Contains predicate on the "name" field. +func NameContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldName), v)) + }) +} + +// NameHasPrefix applies the HasPrefix predicate on the "name" field. +func NameHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldName), v)) + }) +} + +// NameHasSuffix applies the HasSuffix predicate on the "name" field. +func NameHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldName), v)) + }) +} + +// NameEqualFold applies the EqualFold predicate on the "name" field. +func NameEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldName), v)) + }) +} + +// NameContainsFold applies the ContainsFold predicate on the "name" field. 
+func NameContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldName), v)) + }) +} + +// DescriptionEQ applies the EQ predicate on the "description" field. +func DescriptionEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldDescription), v)) + }) +} + +// DescriptionNEQ applies the NEQ predicate on the "description" field. +func DescriptionNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldDescription), v)) + }) +} + +// DescriptionIn applies the In predicate on the "description" field. +func DescriptionIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldDescription), v...)) + }) +} + +// DescriptionNotIn applies the NotIn predicate on the "description" field. +func DescriptionNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldDescription), v...)) + }) +} + +// DescriptionGT applies the GT predicate on the "description" field. +func DescriptionGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldDescription), v)) + }) +} + +// DescriptionGTE applies the GTE predicate on the "description" field. +func DescriptionGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldDescription), v)) + }) +} + +// DescriptionLT applies the LT predicate on the "description" field. +func DescriptionLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldDescription), v)) + }) +} + +// DescriptionLTE applies the LTE predicate on the "description" field. 
+func DescriptionLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldDescription), v)) + }) +} + +// DescriptionContains applies the Contains predicate on the "description" field. +func DescriptionContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldDescription), v)) + }) +} + +// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field. +func DescriptionHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldDescription), v)) + }) +} + +// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field. +func DescriptionHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldDescription), v)) + }) +} + +// DescriptionIsNil applies the IsNil predicate on the "description" field. +func DescriptionIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldDescription))) + }) +} + +// DescriptionNotNil applies the NotNil predicate on the "description" field. +func DescriptionNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldDescription))) + }) +} + +// DescriptionEqualFold applies the EqualFold predicate on the "description" field. +func DescriptionEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldDescription), v)) + }) +} + +// DescriptionContainsFold applies the ContainsFold predicate on the "description" field. +func DescriptionContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldDescription), v)) + }) +} + +// NotesEQ applies the EQ predicate on the "notes" field. 
+func NotesEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldNotes), v)) + }) +} + +// NotesNEQ applies the NEQ predicate on the "notes" field. +func NotesNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldNotes), v)) + }) +} + +// NotesIn applies the In predicate on the "notes" field. +func NotesIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldNotes), v...)) + }) +} + +// NotesNotIn applies the NotIn predicate on the "notes" field. +func NotesNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldNotes), v...)) + }) +} + +// NotesGT applies the GT predicate on the "notes" field. +func NotesGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldNotes), v)) + }) +} + +// NotesGTE applies the GTE predicate on the "notes" field. +func NotesGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldNotes), v)) + }) +} + +// NotesLT applies the LT predicate on the "notes" field. +func NotesLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldNotes), v)) + }) +} + +// NotesLTE applies the LTE predicate on the "notes" field. +func NotesLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldNotes), v)) + }) +} + +// NotesContains applies the Contains predicate on the "notes" field. +func NotesContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldNotes), v)) + }) +} + +// NotesHasPrefix applies the HasPrefix predicate on the "notes" field. 
+func NotesHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldNotes), v)) + }) +} + +// NotesHasSuffix applies the HasSuffix predicate on the "notes" field. +func NotesHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldNotes), v)) + }) +} + +// NotesIsNil applies the IsNil predicate on the "notes" field. +func NotesIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldNotes))) + }) +} + +// NotesNotNil applies the NotNil predicate on the "notes" field. +func NotesNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldNotes))) + }) +} + +// NotesEqualFold applies the EqualFold predicate on the "notes" field. +func NotesEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldNotes), v)) + }) +} + +// NotesContainsFold applies the ContainsFold predicate on the "notes" field. +func NotesContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldNotes), v)) + }) +} + +// SerialNumberEQ applies the EQ predicate on the "serial_number" field. +func SerialNumberEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberNEQ applies the NEQ predicate on the "serial_number" field. +func SerialNumberNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberIn applies the In predicate on the "serial_number" field. 
+func SerialNumberIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldSerialNumber), v...)) + }) +} + +// SerialNumberNotIn applies the NotIn predicate on the "serial_number" field. +func SerialNumberNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldSerialNumber), v...)) + }) +} + +// SerialNumberGT applies the GT predicate on the "serial_number" field. +func SerialNumberGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberGTE applies the GTE predicate on the "serial_number" field. +func SerialNumberGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberLT applies the LT predicate on the "serial_number" field. +func SerialNumberLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberLTE applies the LTE predicate on the "serial_number" field. +func SerialNumberLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberContains applies the Contains predicate on the "serial_number" field. +func SerialNumberContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberHasPrefix applies the HasPrefix predicate on the "serial_number" field. 
+func SerialNumberHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberHasSuffix applies the HasSuffix predicate on the "serial_number" field. +func SerialNumberHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberIsNil applies the IsNil predicate on the "serial_number" field. +func SerialNumberIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldSerialNumber))) + }) +} + +// SerialNumberNotNil applies the NotNil predicate on the "serial_number" field. +func SerialNumberNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldSerialNumber))) + }) +} + +// SerialNumberEqualFold applies the EqualFold predicate on the "serial_number" field. +func SerialNumberEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldSerialNumber), v)) + }) +} + +// SerialNumberContainsFold applies the ContainsFold predicate on the "serial_number" field. +func SerialNumberContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldSerialNumber), v)) + }) +} + +// ModelNumberEQ applies the EQ predicate on the "model_number" field. +func ModelNumberEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberNEQ applies the NEQ predicate on the "model_number" field. +func ModelNumberNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberIn applies the In predicate on the "model_number" field. 
+func ModelNumberIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldModelNumber), v...)) + }) +} + +// ModelNumberNotIn applies the NotIn predicate on the "model_number" field. +func ModelNumberNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldModelNumber), v...)) + }) +} + +// ModelNumberGT applies the GT predicate on the "model_number" field. +func ModelNumberGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberGTE applies the GTE predicate on the "model_number" field. +func ModelNumberGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberLT applies the LT predicate on the "model_number" field. +func ModelNumberLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberLTE applies the LTE predicate on the "model_number" field. +func ModelNumberLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberContains applies the Contains predicate on the "model_number" field. +func ModelNumberContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberHasPrefix applies the HasPrefix predicate on the "model_number" field. 
+func ModelNumberHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberHasSuffix applies the HasSuffix predicate on the "model_number" field. +func ModelNumberHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberIsNil applies the IsNil predicate on the "model_number" field. +func ModelNumberIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldModelNumber))) + }) +} + +// ModelNumberNotNil applies the NotNil predicate on the "model_number" field. +func ModelNumberNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldModelNumber))) + }) +} + +// ModelNumberEqualFold applies the EqualFold predicate on the "model_number" field. +func ModelNumberEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldModelNumber), v)) + }) +} + +// ModelNumberContainsFold applies the ContainsFold predicate on the "model_number" field. +func ModelNumberContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldModelNumber), v)) + }) +} + +// ManufacturerEQ applies the EQ predicate on the "manufacturer" field. +func ManufacturerEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerNEQ applies the NEQ predicate on the "manufacturer" field. +func ManufacturerNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerIn applies the In predicate on the "manufacturer" field. 
+func ManufacturerIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldManufacturer), v...)) + }) +} + +// ManufacturerNotIn applies the NotIn predicate on the "manufacturer" field. +func ManufacturerNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldManufacturer), v...)) + }) +} + +// ManufacturerGT applies the GT predicate on the "manufacturer" field. +func ManufacturerGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerGTE applies the GTE predicate on the "manufacturer" field. +func ManufacturerGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerLT applies the LT predicate on the "manufacturer" field. +func ManufacturerLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerLTE applies the LTE predicate on the "manufacturer" field. +func ManufacturerLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerContains applies the Contains predicate on the "manufacturer" field. +func ManufacturerContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerHasPrefix applies the HasPrefix predicate on the "manufacturer" field. 
+func ManufacturerHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerHasSuffix applies the HasSuffix predicate on the "manufacturer" field. +func ManufacturerHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerIsNil applies the IsNil predicate on the "manufacturer" field. +func ManufacturerIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldManufacturer))) + }) +} + +// ManufacturerNotNil applies the NotNil predicate on the "manufacturer" field. +func ManufacturerNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldManufacturer))) + }) +} + +// ManufacturerEqualFold applies the EqualFold predicate on the "manufacturer" field. +func ManufacturerEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldManufacturer), v)) + }) +} + +// ManufacturerContainsFold applies the ContainsFold predicate on the "manufacturer" field. +func ManufacturerContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldManufacturer), v)) + }) +} + +// PurchaseTimeEQ applies the EQ predicate on the "purchase_time" field. +func PurchaseTimeEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseTimeNEQ applies the NEQ predicate on the "purchase_time" field. +func PurchaseTimeNEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseTimeIn applies the In predicate on the "purchase_time" field. 
+func PurchaseTimeIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldPurchaseTime), v...)) + }) +} + +// PurchaseTimeNotIn applies the NotIn predicate on the "purchase_time" field. +func PurchaseTimeNotIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldPurchaseTime), v...)) + }) +} + +// PurchaseTimeGT applies the GT predicate on the "purchase_time" field. +func PurchaseTimeGT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseTimeGTE applies the GTE predicate on the "purchase_time" field. +func PurchaseTimeGTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseTimeLT applies the LT predicate on the "purchase_time" field. +func PurchaseTimeLT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseTimeLTE applies the LTE predicate on the "purchase_time" field. +func PurchaseTimeLTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldPurchaseTime), v)) + }) +} + +// PurchaseTimeIsNil applies the IsNil predicate on the "purchase_time" field. +func PurchaseTimeIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldPurchaseTime))) + }) +} + +// PurchaseTimeNotNil applies the NotNil predicate on the "purchase_time" field. 
+func PurchaseTimeNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldPurchaseTime))) + }) +} + +// PurchaseFromEQ applies the EQ predicate on the "purchase_from" field. +func PurchaseFromEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromNEQ applies the NEQ predicate on the "purchase_from" field. +func PurchaseFromNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromIn applies the In predicate on the "purchase_from" field. +func PurchaseFromIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldPurchaseFrom), v...)) + }) +} + +// PurchaseFromNotIn applies the NotIn predicate on the "purchase_from" field. +func PurchaseFromNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldPurchaseFrom), v...)) + }) +} + +// PurchaseFromGT applies the GT predicate on the "purchase_from" field. +func PurchaseFromGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromGTE applies the GTE predicate on the "purchase_from" field. +func PurchaseFromGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromLT applies the LT predicate on the "purchase_from" field. +func PurchaseFromLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromLTE applies the LTE predicate on the "purchase_from" field. 
+func PurchaseFromLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromContains applies the Contains predicate on the "purchase_from" field. +func PurchaseFromContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromHasPrefix applies the HasPrefix predicate on the "purchase_from" field. +func PurchaseFromHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromHasSuffix applies the HasSuffix predicate on the "purchase_from" field. +func PurchaseFromHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromIsNil applies the IsNil predicate on the "purchase_from" field. +func PurchaseFromIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldPurchaseFrom))) + }) +} + +// PurchaseFromNotNil applies the NotNil predicate on the "purchase_from" field. +func PurchaseFromNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldPurchaseFrom))) + }) +} + +// PurchaseFromEqualFold applies the EqualFold predicate on the "purchase_from" field. +func PurchaseFromEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchaseFromContainsFold applies the ContainsFold predicate on the "purchase_from" field. +func PurchaseFromContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldPurchaseFrom), v)) + }) +} + +// PurchasePriceEQ applies the EQ predicate on the "purchase_price" field. 
+func PurchasePriceEQ(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchasePriceNEQ applies the NEQ predicate on the "purchase_price" field. +func PurchasePriceNEQ(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchasePriceIn applies the In predicate on the "purchase_price" field. +func PurchasePriceIn(vs ...float64) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldPurchasePrice), v...)) + }) +} + +// PurchasePriceNotIn applies the NotIn predicate on the "purchase_price" field. +func PurchasePriceNotIn(vs ...float64) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldPurchasePrice), v...)) + }) +} + +// PurchasePriceGT applies the GT predicate on the "purchase_price" field. +func PurchasePriceGT(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchasePriceGTE applies the GTE predicate on the "purchase_price" field. +func PurchasePriceGTE(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchasePriceLT applies the LT predicate on the "purchase_price" field. +func PurchasePriceLT(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchasePriceLTE applies the LTE predicate on the "purchase_price" field. 
+func PurchasePriceLTE(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldPurchasePrice), v)) + }) +} + +// PurchaseReceiptIDEQ applies the EQ predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDEQ(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// PurchaseReceiptIDNEQ applies the NEQ predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDNEQ(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// PurchaseReceiptIDIn applies the In predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDIn(vs ...uuid.UUID) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldPurchaseReceiptID), v...)) + }) +} + +// PurchaseReceiptIDNotIn applies the NotIn predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDNotIn(vs ...uuid.UUID) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldPurchaseReceiptID), v...)) + }) +} + +// PurchaseReceiptIDGT applies the GT predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDGT(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// PurchaseReceiptIDGTE applies the GTE predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDGTE(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// PurchaseReceiptIDLT applies the LT predicate on the "purchase_receipt_id" field. 
+func PurchaseReceiptIDLT(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// PurchaseReceiptIDLTE applies the LTE predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDLTE(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldPurchaseReceiptID), v)) + }) +} + +// PurchaseReceiptIDIsNil applies the IsNil predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldPurchaseReceiptID))) + }) +} + +// PurchaseReceiptIDNotNil applies the NotNil predicate on the "purchase_receipt_id" field. +func PurchaseReceiptIDNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldPurchaseReceiptID))) + }) +} + +// SoldTimeEQ applies the EQ predicate on the "sold_time" field. +func SoldTimeEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldTime), v)) + }) +} + +// SoldTimeNEQ applies the NEQ predicate on the "sold_time" field. +func SoldTimeNEQ(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldSoldTime), v)) + }) +} + +// SoldTimeIn applies the In predicate on the "sold_time" field. +func SoldTimeIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldSoldTime), v...)) + }) +} + +// SoldTimeNotIn applies the NotIn predicate on the "sold_time" field. 
+func SoldTimeNotIn(vs ...time.Time) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldSoldTime), v...)) + }) +} + +// SoldTimeGT applies the GT predicate on the "sold_time" field. +func SoldTimeGT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldSoldTime), v)) + }) +} + +// SoldTimeGTE applies the GTE predicate on the "sold_time" field. +func SoldTimeGTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldSoldTime), v)) + }) +} + +// SoldTimeLT applies the LT predicate on the "sold_time" field. +func SoldTimeLT(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldSoldTime), v)) + }) +} + +// SoldTimeLTE applies the LTE predicate on the "sold_time" field. +func SoldTimeLTE(v time.Time) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldSoldTime), v)) + }) +} + +// SoldTimeIsNil applies the IsNil predicate on the "sold_time" field. +func SoldTimeIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldSoldTime))) + }) +} + +// SoldTimeNotNil applies the NotNil predicate on the "sold_time" field. +func SoldTimeNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldSoldTime))) + }) +} + +// SoldToEQ applies the EQ predicate on the "sold_to" field. +func SoldToEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldTo), v)) + }) +} + +// SoldToNEQ applies the NEQ predicate on the "sold_to" field. +func SoldToNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldSoldTo), v)) + }) +} + +// SoldToIn applies the In predicate on the "sold_to" field. 
+func SoldToIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldSoldTo), v...)) + }) +} + +// SoldToNotIn applies the NotIn predicate on the "sold_to" field. +func SoldToNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldSoldTo), v...)) + }) +} + +// SoldToGT applies the GT predicate on the "sold_to" field. +func SoldToGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldSoldTo), v)) + }) +} + +// SoldToGTE applies the GTE predicate on the "sold_to" field. +func SoldToGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldSoldTo), v)) + }) +} + +// SoldToLT applies the LT predicate on the "sold_to" field. +func SoldToLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldSoldTo), v)) + }) +} + +// SoldToLTE applies the LTE predicate on the "sold_to" field. +func SoldToLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldSoldTo), v)) + }) +} + +// SoldToContains applies the Contains predicate on the "sold_to" field. +func SoldToContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldSoldTo), v)) + }) +} + +// SoldToHasPrefix applies the HasPrefix predicate on the "sold_to" field. +func SoldToHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldSoldTo), v)) + }) +} + +// SoldToHasSuffix applies the HasSuffix predicate on the "sold_to" field. 
+func SoldToHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldSoldTo), v)) + }) +} + +// SoldToIsNil applies the IsNil predicate on the "sold_to" field. +func SoldToIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldSoldTo))) + }) +} + +// SoldToNotNil applies the NotNil predicate on the "sold_to" field. +func SoldToNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldSoldTo))) + }) +} + +// SoldToEqualFold applies the EqualFold predicate on the "sold_to" field. +func SoldToEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldSoldTo), v)) + }) +} + +// SoldToContainsFold applies the ContainsFold predicate on the "sold_to" field. +func SoldToContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldSoldTo), v)) + }) +} + +// SoldPriceEQ applies the EQ predicate on the "sold_price" field. +func SoldPriceEQ(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldPrice), v)) + }) +} + +// SoldPriceNEQ applies the NEQ predicate on the "sold_price" field. +func SoldPriceNEQ(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldSoldPrice), v)) + }) +} + +// SoldPriceIn applies the In predicate on the "sold_price" field. +func SoldPriceIn(vs ...float64) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldSoldPrice), v...)) + }) +} + +// SoldPriceNotIn applies the NotIn predicate on the "sold_price" field. 
+func SoldPriceNotIn(vs ...float64) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldSoldPrice), v...)) + }) +} + +// SoldPriceGT applies the GT predicate on the "sold_price" field. +func SoldPriceGT(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldSoldPrice), v)) + }) +} + +// SoldPriceGTE applies the GTE predicate on the "sold_price" field. +func SoldPriceGTE(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldSoldPrice), v)) + }) +} + +// SoldPriceLT applies the LT predicate on the "sold_price" field. +func SoldPriceLT(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldSoldPrice), v)) + }) +} + +// SoldPriceLTE applies the LTE predicate on the "sold_price" field. +func SoldPriceLTE(v float64) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldSoldPrice), v)) + }) +} + +// SoldReceiptIDEQ applies the EQ predicate on the "sold_receipt_id" field. +func SoldReceiptIDEQ(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldReceiptIDNEQ applies the NEQ predicate on the "sold_receipt_id" field. +func SoldReceiptIDNEQ(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldReceiptIDIn applies the In predicate on the "sold_receipt_id" field. +func SoldReceiptIDIn(vs ...uuid.UUID) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldSoldReceiptID), v...)) + }) +} + +// SoldReceiptIDNotIn applies the NotIn predicate on the "sold_receipt_id" field. 
+func SoldReceiptIDNotIn(vs ...uuid.UUID) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldSoldReceiptID), v...)) + }) +} + +// SoldReceiptIDGT applies the GT predicate on the "sold_receipt_id" field. +func SoldReceiptIDGT(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldReceiptIDGTE applies the GTE predicate on the "sold_receipt_id" field. +func SoldReceiptIDGTE(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldReceiptIDLT applies the LT predicate on the "sold_receipt_id" field. +func SoldReceiptIDLT(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldReceiptIDLTE applies the LTE predicate on the "sold_receipt_id" field. +func SoldReceiptIDLTE(v uuid.UUID) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldSoldReceiptID), v)) + }) +} + +// SoldReceiptIDIsNil applies the IsNil predicate on the "sold_receipt_id" field. +func SoldReceiptIDIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldSoldReceiptID))) + }) +} + +// SoldReceiptIDNotNil applies the NotNil predicate on the "sold_receipt_id" field. +func SoldReceiptIDNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldSoldReceiptID))) + }) +} + +// SoldNotesEQ applies the EQ predicate on the "sold_notes" field. +func SoldNotesEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesNEQ applies the NEQ predicate on the "sold_notes" field. 
+func SoldNotesNEQ(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesIn applies the In predicate on the "sold_notes" field. +func SoldNotesIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldSoldNotes), v...)) + }) +} + +// SoldNotesNotIn applies the NotIn predicate on the "sold_notes" field. +func SoldNotesNotIn(vs ...string) predicate.Item { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldSoldNotes), v...)) + }) +} + +// SoldNotesGT applies the GT predicate on the "sold_notes" field. +func SoldNotesGT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesGTE applies the GTE predicate on the "sold_notes" field. +func SoldNotesGTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesLT applies the LT predicate on the "sold_notes" field. +func SoldNotesLT(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesLTE applies the LTE predicate on the "sold_notes" field. +func SoldNotesLTE(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesContains applies the Contains predicate on the "sold_notes" field. +func SoldNotesContains(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesHasPrefix applies the HasPrefix predicate on the "sold_notes" field. 
+func SoldNotesHasPrefix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesHasSuffix applies the HasSuffix predicate on the "sold_notes" field. +func SoldNotesHasSuffix(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesIsNil applies the IsNil predicate on the "sold_notes" field. +func SoldNotesIsNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldSoldNotes))) + }) +} + +// SoldNotesNotNil applies the NotNil predicate on the "sold_notes" field. +func SoldNotesNotNil() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldSoldNotes))) + }) +} + +// SoldNotesEqualFold applies the EqualFold predicate on the "sold_notes" field. +func SoldNotesEqualFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldSoldNotes), v)) + }) +} + +// SoldNotesContainsFold applies the ContainsFold predicate on the "sold_notes" field. +func SoldNotesContainsFold(v string) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldSoldNotes), v)) + }) +} + +// HasGroup applies the HasEdge predicate on the "group" edge. +func HasGroup() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasGroupWith applies the HasEdge predicate on the "group" edge with a given conditions (other predicates). 
+func HasGroupWith(preds ...predicate.Group) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasLocation applies the HasEdge predicate on the "location" edge. +func HasLocation() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LocationTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, LocationTable, LocationColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasLocationWith applies the HasEdge predicate on the "location" edge with a given conditions (other predicates). +func HasLocationWith(preds ...predicate.Location) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LocationInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, LocationTable, LocationColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasFields applies the HasEdge predicate on the "fields" edge. +func HasFields() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(FieldsTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, FieldsTable, FieldsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasFieldsWith applies the HasEdge predicate on the "fields" edge with a given conditions (other predicates). 
+func HasFieldsWith(preds ...predicate.ItemField) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(FieldsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, FieldsTable, FieldsColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasLabel applies the HasEdge predicate on the "label" edge. +func HasLabel() predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LabelTable, FieldID), + sqlgraph.Edge(sqlgraph.M2M, true, LabelTable, LabelPrimaryKey...), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasLabelWith applies the HasEdge predicate on the "label" edge with a given conditions (other predicates). +func HasLabelWith(preds ...predicate.Label) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(LabelInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2M, true, LabelTable, LabelPrimaryKey...), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.Item) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.Item) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. 
+func Not(p predicate.Item) predicate.Item { + return predicate.Item(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/item_create.go b/backend/ent/item_create.go new file mode 100644 index 0000000..5242365 --- /dev/null +++ b/backend/ent/item_create.go @@ -0,0 +1,817 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" +) + +// ItemCreate is the builder for creating a Item entity. +type ItemCreate struct { + config + mutation *ItemMutation + hooks []Hook +} + +// SetCreatedAt sets the "created_at" field. +func (ic *ItemCreate) SetCreatedAt(t time.Time) *ItemCreate { + ic.mutation.SetCreatedAt(t) + return ic +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (ic *ItemCreate) SetNillableCreatedAt(t *time.Time) *ItemCreate { + if t != nil { + ic.SetCreatedAt(*t) + } + return ic +} + +// SetUpdatedAt sets the "updated_at" field. +func (ic *ItemCreate) SetUpdatedAt(t time.Time) *ItemCreate { + ic.mutation.SetUpdatedAt(t) + return ic +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (ic *ItemCreate) SetNillableUpdatedAt(t *time.Time) *ItemCreate { + if t != nil { + ic.SetUpdatedAt(*t) + } + return ic +} + +// SetName sets the "name" field. +func (ic *ItemCreate) SetName(s string) *ItemCreate { + ic.mutation.SetName(s) + return ic +} + +// SetDescription sets the "description" field. 
+func (ic *ItemCreate) SetDescription(s string) *ItemCreate { + ic.mutation.SetDescription(s) + return ic +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (ic *ItemCreate) SetNillableDescription(s *string) *ItemCreate { + if s != nil { + ic.SetDescription(*s) + } + return ic +} + +// SetNotes sets the "notes" field. +func (ic *ItemCreate) SetNotes(s string) *ItemCreate { + ic.mutation.SetNotes(s) + return ic +} + +// SetNillableNotes sets the "notes" field if the given value is not nil. +func (ic *ItemCreate) SetNillableNotes(s *string) *ItemCreate { + if s != nil { + ic.SetNotes(*s) + } + return ic +} + +// SetSerialNumber sets the "serial_number" field. +func (ic *ItemCreate) SetSerialNumber(s string) *ItemCreate { + ic.mutation.SetSerialNumber(s) + return ic +} + +// SetNillableSerialNumber sets the "serial_number" field if the given value is not nil. +func (ic *ItemCreate) SetNillableSerialNumber(s *string) *ItemCreate { + if s != nil { + ic.SetSerialNumber(*s) + } + return ic +} + +// SetModelNumber sets the "model_number" field. +func (ic *ItemCreate) SetModelNumber(s string) *ItemCreate { + ic.mutation.SetModelNumber(s) + return ic +} + +// SetNillableModelNumber sets the "model_number" field if the given value is not nil. +func (ic *ItemCreate) SetNillableModelNumber(s *string) *ItemCreate { + if s != nil { + ic.SetModelNumber(*s) + } + return ic +} + +// SetManufacturer sets the "manufacturer" field. +func (ic *ItemCreate) SetManufacturer(s string) *ItemCreate { + ic.mutation.SetManufacturer(s) + return ic +} + +// SetNillableManufacturer sets the "manufacturer" field if the given value is not nil. +func (ic *ItemCreate) SetNillableManufacturer(s *string) *ItemCreate { + if s != nil { + ic.SetManufacturer(*s) + } + return ic +} + +// SetPurchaseTime sets the "purchase_time" field. 
+func (ic *ItemCreate) SetPurchaseTime(t time.Time) *ItemCreate { + ic.mutation.SetPurchaseTime(t) + return ic +} + +// SetNillablePurchaseTime sets the "purchase_time" field if the given value is not nil. +func (ic *ItemCreate) SetNillablePurchaseTime(t *time.Time) *ItemCreate { + if t != nil { + ic.SetPurchaseTime(*t) + } + return ic +} + +// SetPurchaseFrom sets the "purchase_from" field. +func (ic *ItemCreate) SetPurchaseFrom(s string) *ItemCreate { + ic.mutation.SetPurchaseFrom(s) + return ic +} + +// SetNillablePurchaseFrom sets the "purchase_from" field if the given value is not nil. +func (ic *ItemCreate) SetNillablePurchaseFrom(s *string) *ItemCreate { + if s != nil { + ic.SetPurchaseFrom(*s) + } + return ic +} + +// SetPurchasePrice sets the "purchase_price" field. +func (ic *ItemCreate) SetPurchasePrice(f float64) *ItemCreate { + ic.mutation.SetPurchasePrice(f) + return ic +} + +// SetNillablePurchasePrice sets the "purchase_price" field if the given value is not nil. +func (ic *ItemCreate) SetNillablePurchasePrice(f *float64) *ItemCreate { + if f != nil { + ic.SetPurchasePrice(*f) + } + return ic +} + +// SetPurchaseReceiptID sets the "purchase_receipt_id" field. +func (ic *ItemCreate) SetPurchaseReceiptID(u uuid.UUID) *ItemCreate { + ic.mutation.SetPurchaseReceiptID(u) + return ic +} + +// SetNillablePurchaseReceiptID sets the "purchase_receipt_id" field if the given value is not nil. +func (ic *ItemCreate) SetNillablePurchaseReceiptID(u *uuid.UUID) *ItemCreate { + if u != nil { + ic.SetPurchaseReceiptID(*u) + } + return ic +} + +// SetSoldTime sets the "sold_time" field. +func (ic *ItemCreate) SetSoldTime(t time.Time) *ItemCreate { + ic.mutation.SetSoldTime(t) + return ic +} + +// SetNillableSoldTime sets the "sold_time" field if the given value is not nil. +func (ic *ItemCreate) SetNillableSoldTime(t *time.Time) *ItemCreate { + if t != nil { + ic.SetSoldTime(*t) + } + return ic +} + +// SetSoldTo sets the "sold_to" field. 
+func (ic *ItemCreate) SetSoldTo(s string) *ItemCreate { + ic.mutation.SetSoldTo(s) + return ic +} + +// SetNillableSoldTo sets the "sold_to" field if the given value is not nil. +func (ic *ItemCreate) SetNillableSoldTo(s *string) *ItemCreate { + if s != nil { + ic.SetSoldTo(*s) + } + return ic +} + +// SetSoldPrice sets the "sold_price" field. +func (ic *ItemCreate) SetSoldPrice(f float64) *ItemCreate { + ic.mutation.SetSoldPrice(f) + return ic +} + +// SetNillableSoldPrice sets the "sold_price" field if the given value is not nil. +func (ic *ItemCreate) SetNillableSoldPrice(f *float64) *ItemCreate { + if f != nil { + ic.SetSoldPrice(*f) + } + return ic +} + +// SetSoldReceiptID sets the "sold_receipt_id" field. +func (ic *ItemCreate) SetSoldReceiptID(u uuid.UUID) *ItemCreate { + ic.mutation.SetSoldReceiptID(u) + return ic +} + +// SetNillableSoldReceiptID sets the "sold_receipt_id" field if the given value is not nil. +func (ic *ItemCreate) SetNillableSoldReceiptID(u *uuid.UUID) *ItemCreate { + if u != nil { + ic.SetSoldReceiptID(*u) + } + return ic +} + +// SetSoldNotes sets the "sold_notes" field. +func (ic *ItemCreate) SetSoldNotes(s string) *ItemCreate { + ic.mutation.SetSoldNotes(s) + return ic +} + +// SetNillableSoldNotes sets the "sold_notes" field if the given value is not nil. +func (ic *ItemCreate) SetNillableSoldNotes(s *string) *ItemCreate { + if s != nil { + ic.SetSoldNotes(*s) + } + return ic +} + +// SetID sets the "id" field. +func (ic *ItemCreate) SetID(u uuid.UUID) *ItemCreate { + ic.mutation.SetID(u) + return ic +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (ic *ItemCreate) SetNillableID(u *uuid.UUID) *ItemCreate { + if u != nil { + ic.SetID(*u) + } + return ic +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (ic *ItemCreate) SetGroupID(id uuid.UUID) *ItemCreate { + ic.mutation.SetGroupID(id) + return ic +} + +// SetGroup sets the "group" edge to the Group entity. 
+func (ic *ItemCreate) SetGroup(g *Group) *ItemCreate { + return ic.SetGroupID(g.ID) +} + +// SetLocationID sets the "location" edge to the Location entity by ID. +func (ic *ItemCreate) SetLocationID(id uuid.UUID) *ItemCreate { + ic.mutation.SetLocationID(id) + return ic +} + +// SetNillableLocationID sets the "location" edge to the Location entity by ID if the given value is not nil. +func (ic *ItemCreate) SetNillableLocationID(id *uuid.UUID) *ItemCreate { + if id != nil { + ic = ic.SetLocationID(*id) + } + return ic +} + +// SetLocation sets the "location" edge to the Location entity. +func (ic *ItemCreate) SetLocation(l *Location) *ItemCreate { + return ic.SetLocationID(l.ID) +} + +// AddFieldIDs adds the "fields" edge to the ItemField entity by IDs. +func (ic *ItemCreate) AddFieldIDs(ids ...uuid.UUID) *ItemCreate { + ic.mutation.AddFieldIDs(ids...) + return ic +} + +// AddFields adds the "fields" edges to the ItemField entity. +func (ic *ItemCreate) AddFields(i ...*ItemField) *ItemCreate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return ic.AddFieldIDs(ids...) +} + +// AddLabelIDs adds the "label" edge to the Label entity by IDs. +func (ic *ItemCreate) AddLabelIDs(ids ...uuid.UUID) *ItemCreate { + ic.mutation.AddLabelIDs(ids...) + return ic +} + +// AddLabel adds the "label" edges to the Label entity. +func (ic *ItemCreate) AddLabel(l ...*Label) *ItemCreate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return ic.AddLabelIDs(ids...) +} + +// Mutation returns the ItemMutation object of the builder. +func (ic *ItemCreate) Mutation() *ItemMutation { + return ic.mutation +} + +// Save creates the Item in the database. 
+func (ic *ItemCreate) Save(ctx context.Context) (*Item, error) { + var ( + err error + node *Item + ) + ic.defaults() + if len(ic.hooks) == 0 { + if err = ic.check(); err != nil { + return nil, err + } + node, err = ic.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = ic.check(); err != nil { + return nil, err + } + ic.mutation = mutation + if node, err = ic.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(ic.hooks) - 1; i >= 0; i-- { + if ic.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ic.hooks[i](mut) + } + v, err := mut.Mutate(ctx, ic.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Item) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from ItemMutation", v) + } + node = nv + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. +func (ic *ItemCreate) SaveX(ctx context.Context) *Item { + v, err := ic.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (ic *ItemCreate) Exec(ctx context.Context) error { + _, err := ic.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ic *ItemCreate) ExecX(ctx context.Context) { + if err := ic.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. 
+func (ic *ItemCreate) defaults() { + if _, ok := ic.mutation.CreatedAt(); !ok { + v := item.DefaultCreatedAt() + ic.mutation.SetCreatedAt(v) + } + if _, ok := ic.mutation.UpdatedAt(); !ok { + v := item.DefaultUpdatedAt() + ic.mutation.SetUpdatedAt(v) + } + if _, ok := ic.mutation.PurchasePrice(); !ok { + v := item.DefaultPurchasePrice + ic.mutation.SetPurchasePrice(v) + } + if _, ok := ic.mutation.SoldPrice(); !ok { + v := item.DefaultSoldPrice + ic.mutation.SetSoldPrice(v) + } + if _, ok := ic.mutation.ID(); !ok { + v := item.DefaultID() + ic.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (ic *ItemCreate) check() error { + if _, ok := ic.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Item.created_at"`)} + } + if _, ok := ic.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Item.updated_at"`)} + } + if _, ok := ic.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "Item.name"`)} + } + if v, ok := ic.mutation.Name(); ok { + if err := item.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Item.name": %w`, err)} + } + } + if v, ok := ic.mutation.Description(); ok { + if err := item.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)} + } + } + if v, ok := ic.mutation.Notes(); ok { + if err := item.NotesValidator(v); err != nil { + return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)} + } + } + if v, ok := ic.mutation.SerialNumber(); ok { + if err := item.SerialNumberValidator(v); err != nil { + return &ValidationError{Name: "serial_number", err: fmt.Errorf(`ent: validator failed 
for field "Item.serial_number": %w`, err)} + } + } + if v, ok := ic.mutation.ModelNumber(); ok { + if err := item.ModelNumberValidator(v); err != nil { + return &ValidationError{Name: "model_number", err: fmt.Errorf(`ent: validator failed for field "Item.model_number": %w`, err)} + } + } + if v, ok := ic.mutation.Manufacturer(); ok { + if err := item.ManufacturerValidator(v); err != nil { + return &ValidationError{Name: "manufacturer", err: fmt.Errorf(`ent: validator failed for field "Item.manufacturer": %w`, err)} + } + } + if _, ok := ic.mutation.PurchasePrice(); !ok { + return &ValidationError{Name: "purchase_price", err: errors.New(`ent: missing required field "Item.purchase_price"`)} + } + if _, ok := ic.mutation.SoldPrice(); !ok { + return &ValidationError{Name: "sold_price", err: errors.New(`ent: missing required field "Item.sold_price"`)} + } + if v, ok := ic.mutation.SoldNotes(); ok { + if err := item.SoldNotesValidator(v); err != nil { + return &ValidationError{Name: "sold_notes", err: fmt.Errorf(`ent: validator failed for field "Item.sold_notes": %w`, err)} + } + } + if _, ok := ic.mutation.GroupID(); !ok { + return &ValidationError{Name: "group", err: errors.New(`ent: missing required edge "Item.group"`)} + } + return nil +} + +func (ic *ItemCreate) sqlSave(ctx context.Context) (*Item, error) { + _node, _spec := ic.createSpec() + if err := sqlgraph.CreateNode(ctx, ic.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) { + var ( + _node = &Item{config: ic.config} + _spec = &sqlgraph.CreateSpec{ + Table: item.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + 
}, + } + ) + if id, ok := ic.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := ic.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := ic.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } + if value, ok := ic.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldName, + }) + _node.Name = value + } + if value, ok := ic.mutation.Description(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldDescription, + }) + _node.Description = value + } + if value, ok := ic.mutation.Notes(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldNotes, + }) + _node.Notes = value + } + if value, ok := ic.mutation.SerialNumber(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSerialNumber, + }) + _node.SerialNumber = value + } + if value, ok := ic.mutation.ModelNumber(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldModelNumber, + }) + _node.ModelNumber = value + } + if value, ok := ic.mutation.Manufacturer(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldManufacturer, + }) + _node.Manufacturer = value + } + if value, ok := ic.mutation.PurchaseTime(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldPurchaseTime, + }) 
+ _node.PurchaseTime = value + } + if value, ok := ic.mutation.PurchaseFrom(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldPurchaseFrom, + }) + _node.PurchaseFrom = value + } + if value, ok := ic.mutation.PurchasePrice(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldPurchasePrice, + }) + _node.PurchasePrice = value + } + if value, ok := ic.mutation.PurchaseReceiptID(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Value: value, + Column: item.FieldPurchaseReceiptID, + }) + _node.PurchaseReceiptID = value + } + if value, ok := ic.mutation.SoldTime(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldSoldTime, + }) + _node.SoldTime = value + } + if value, ok := ic.mutation.SoldTo(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSoldTo, + }) + _node.SoldTo = value + } + if value, ok := ic.mutation.SoldPrice(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldSoldPrice, + }) + _node.SoldPrice = value + } + if value, ok := ic.mutation.SoldReceiptID(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Value: value, + Column: item.FieldSoldReceiptID, + }) + _node.SoldReceiptID = value + } + if value, ok := ic.mutation.SoldNotes(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSoldNotes, + }) + _node.SoldNotes = value + } + if nodes := ic.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.GroupTable, + Columns: []string{item.GroupColumn}, + Bidi: false, + 
Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.group_items = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := ic.mutation.LocationIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.LocationTable, + Columns: []string{item.LocationColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.location_items = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := ic.mutation.FieldsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := ic.mutation.LabelIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// ItemCreateBulk is the builder for creating many Item entities in bulk. +type ItemCreateBulk struct { + config + builders []*ItemCreate +} + +// Save creates the Item entities in the database. 
+func (icb *ItemCreateBulk) Save(ctx context.Context) ([]*Item, error) { + specs := make([]*sqlgraph.CreateSpec, len(icb.builders)) + nodes := make([]*Item, len(icb.builders)) + mutators := make([]Mutator, len(icb.builders)) + for i := range icb.builders { + func(i int, root context.Context) { + builder := icb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, icb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. + if err = sqlgraph.BatchCreate(ctx, icb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, icb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (icb *ItemCreateBulk) SaveX(ctx context.Context) []*Item { + v, err := icb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (icb *ItemCreateBulk) Exec(ctx context.Context) error { + _, err := icb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (icb *ItemCreateBulk) ExecX(ctx context.Context) { + if err := icb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/item_delete.go b/backend/ent/item_delete.go new file mode 100644 index 0000000..9fea485 --- /dev/null +++ b/backend/ent/item_delete.go @@ -0,0 +1,115 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ItemDelete is the builder for deleting a Item entity. +type ItemDelete struct { + config + hooks []Hook + mutation *ItemMutation +} + +// Where appends a list predicates to the ItemDelete builder. +func (id *ItemDelete) Where(ps ...predicate.Item) *ItemDelete { + id.mutation.Where(ps...) + return id +} + +// Exec executes the deletion query and returns how many vertices were deleted. +func (id *ItemDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(id.hooks) == 0 { + affected, err = id.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + id.mutation = mutation + affected, err = id.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(id.hooks) - 1; i >= 0; i-- { + if id.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = id.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, id.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (id *ItemDelete) ExecX(ctx context.Context) int { + n, err := id.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (id *ItemDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: item.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + if ps := id.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, id.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return affected, err +} + +// ItemDeleteOne is the builder for deleting a single Item entity. +type ItemDeleteOne struct { + id *ItemDelete +} + +// Exec executes the deletion query. +func (ido *ItemDeleteOne) Exec(ctx context.Context) error { + n, err := ido.id.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{item.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (ido *ItemDeleteOne) ExecX(ctx context.Context) { + ido.id.ExecX(ctx) +} diff --git a/backend/ent/item_query.go b/backend/ent/item_query.go new file mode 100644 index 0000000..b164239 --- /dev/null +++ b/backend/ent/item_query.go @@ -0,0 +1,861 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "database/sql/driver" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ItemQuery is the builder for querying Item entities. +type ItemQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.Item + withGroup *GroupQuery + withLocation *LocationQuery + withFields *ItemFieldQuery + withLabel *LabelQuery + withFKs bool + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the ItemQuery builder. +func (iq *ItemQuery) Where(ps ...predicate.Item) *ItemQuery { + iq.predicates = append(iq.predicates, ps...) + return iq +} + +// Limit adds a limit step to the query. +func (iq *ItemQuery) Limit(limit int) *ItemQuery { + iq.limit = &limit + return iq +} + +// Offset adds an offset step to the query. +func (iq *ItemQuery) Offset(offset int) *ItemQuery { + iq.offset = &offset + return iq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (iq *ItemQuery) Unique(unique bool) *ItemQuery { + iq.unique = &unique + return iq +} + +// Order adds an order step to the query. +func (iq *ItemQuery) Order(o ...OrderFunc) *ItemQuery { + iq.order = append(iq.order, o...) + return iq +} + +// QueryGroup chains the current query on the "group" edge. 
+func (iq *ItemQuery) QueryGroup() *GroupQuery { + query := &GroupQuery{config: iq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := iq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := iq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, selector), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, item.GroupTable, item.GroupColumn), + ) + fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryLocation chains the current query on the "location" edge. +func (iq *ItemQuery) QueryLocation() *LocationQuery { + query := &LocationQuery{config: iq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := iq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := iq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, selector), + sqlgraph.To(location.Table, location.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, item.LocationTable, item.LocationColumn), + ) + fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryFields chains the current query on the "fields" edge. 
+func (iq *ItemQuery) QueryFields() *ItemFieldQuery { + query := &ItemFieldQuery{config: iq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := iq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := iq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, selector), + sqlgraph.To(itemfield.Table, itemfield.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, item.FieldsTable, item.FieldsColumn), + ) + fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryLabel chains the current query on the "label" edge. +func (iq *ItemQuery) QueryLabel() *LabelQuery { + query := &LabelQuery{config: iq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := iq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := iq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(item.Table, item.FieldID, selector), + sqlgraph.To(label.Table, label.FieldID), + sqlgraph.Edge(sqlgraph.M2M, true, item.LabelTable, item.LabelPrimaryKey...), + ) + fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first Item entity from the query. +// Returns a *NotFoundError when no Item was found. +func (iq *ItemQuery) First(ctx context.Context) (*Item, error) { + nodes, err := iq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{item.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (iq *ItemQuery) FirstX(ctx context.Context) *Item { + node, err := iq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first Item ID from the query. 
+// Returns a *NotFoundError when no Item ID was found. +func (iq *ItemQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = iq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{item.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (iq *ItemQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := iq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single Item entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one Item entity is found. +// Returns a *NotFoundError when no Item entities are found. +func (iq *ItemQuery) Only(ctx context.Context) (*Item, error) { + nodes, err := iq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{item.Label} + default: + return nil, &NotSingularError{item.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (iq *ItemQuery) OnlyX(ctx context.Context) *Item { + node, err := iq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only Item ID in the query. +// Returns a *NotSingularError when more than one Item ID is found. +// Returns a *NotFoundError when no entities are found. +func (iq *ItemQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = iq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{item.Label} + default: + err = &NotSingularError{item.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. 
+func (iq *ItemQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := iq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of Items. +func (iq *ItemQuery) All(ctx context.Context) ([]*Item, error) { + if err := iq.prepareQuery(ctx); err != nil { + return nil, err + } + return iq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (iq *ItemQuery) AllX(ctx context.Context) []*Item { + nodes, err := iq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of Item IDs. +func (iq *ItemQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := iq.Select(item.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (iq *ItemQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := iq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (iq *ItemQuery) Count(ctx context.Context) (int, error) { + if err := iq.prepareQuery(ctx); err != nil { + return 0, err + } + return iq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (iq *ItemQuery) CountX(ctx context.Context) int { + count, err := iq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (iq *ItemQuery) Exist(ctx context.Context) (bool, error) { + if err := iq.prepareQuery(ctx); err != nil { + return false, err + } + return iq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. +func (iq *ItemQuery) ExistX(ctx context.Context) bool { + exist, err := iq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the ItemQuery builder, including all associated steps. 
It can be +// used to prepare common query builders and use them differently after the clone is made. +func (iq *ItemQuery) Clone() *ItemQuery { + if iq == nil { + return nil + } + return &ItemQuery{ + config: iq.config, + limit: iq.limit, + offset: iq.offset, + order: append([]OrderFunc{}, iq.order...), + predicates: append([]predicate.Item{}, iq.predicates...), + withGroup: iq.withGroup.Clone(), + withLocation: iq.withLocation.Clone(), + withFields: iq.withFields.Clone(), + withLabel: iq.withLabel.Clone(), + // clone intermediate query. + sql: iq.sql.Clone(), + path: iq.path, + unique: iq.unique, + } +} + +// WithGroup tells the query-builder to eager-load the nodes that are connected to +// the "group" edge. The optional arguments are used to configure the query builder of the edge. +func (iq *ItemQuery) WithGroup(opts ...func(*GroupQuery)) *ItemQuery { + query := &GroupQuery{config: iq.config} + for _, opt := range opts { + opt(query) + } + iq.withGroup = query + return iq +} + +// WithLocation tells the query-builder to eager-load the nodes that are connected to +// the "location" edge. The optional arguments are used to configure the query builder of the edge. +func (iq *ItemQuery) WithLocation(opts ...func(*LocationQuery)) *ItemQuery { + query := &LocationQuery{config: iq.config} + for _, opt := range opts { + opt(query) + } + iq.withLocation = query + return iq +} + +// WithFields tells the query-builder to eager-load the nodes that are connected to +// the "fields" edge. The optional arguments are used to configure the query builder of the edge. +func (iq *ItemQuery) WithFields(opts ...func(*ItemFieldQuery)) *ItemQuery { + query := &ItemFieldQuery{config: iq.config} + for _, opt := range opts { + opt(query) + } + iq.withFields = query + return iq +} + +// WithLabel tells the query-builder to eager-load the nodes that are connected to +// the "label" edge. The optional arguments are used to configure the query builder of the edge. 
+func (iq *ItemQuery) WithLabel(opts ...func(*LabelQuery)) *ItemQuery { + query := &LabelQuery{config: iq.config} + for _, opt := range opts { + opt(query) + } + iq.withLabel = query + return iq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.Item.Query(). +// GroupBy(item.FieldCreatedAt). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (iq *ItemQuery) GroupBy(field string, fields ...string) *ItemGroupBy { + grbuild := &ItemGroupBy{config: iq.config} + grbuild.fields = append([]string{field}, fields...) + grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := iq.prepareQuery(ctx); err != nil { + return nil, err + } + return iq.sqlQuery(ctx), nil + } + grbuild.label = item.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.Item.Query(). +// Select(item.FieldCreatedAt). +// Scan(ctx, &v) +func (iq *ItemQuery) Select(fields ...string) *ItemSelect { + iq.fields = append(iq.fields, fields...) 
+ selbuild := &ItemSelect{ItemQuery: iq} + selbuild.label = item.Label + selbuild.flds, selbuild.scan = &iq.fields, selbuild.Scan + return selbuild +} + +func (iq *ItemQuery) prepareQuery(ctx context.Context) error { + for _, f := range iq.fields { + if !item.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if iq.path != nil { + prev, err := iq.path(ctx) + if err != nil { + return err + } + iq.sql = prev + } + return nil +} + +func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, error) { + var ( + nodes = []*Item{} + withFKs = iq.withFKs + _spec = iq.querySpec() + loadedTypes = [4]bool{ + iq.withGroup != nil, + iq.withLocation != nil, + iq.withFields != nil, + iq.withLabel != nil, + } + ) + if iq.withGroup != nil || iq.withLocation != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, item.ForeignKeys...) + } + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + return (*Item).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + node := &Item{config: iq.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, iq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := iq.withGroup; query != nil { + if err := iq.loadGroup(ctx, query, nodes, nil, + func(n *Item, e *Group) { n.Edges.Group = e }); err != nil { + return nil, err + } + } + if query := iq.withLocation; query != nil { + if err := iq.loadLocation(ctx, query, nodes, nil, + func(n *Item, e *Location) { n.Edges.Location = e }); err != nil { + return nil, err + } + } + if query := iq.withFields; query != nil { + if err := iq.loadFields(ctx, query, nodes, + func(n *Item) { n.Edges.Fields = []*ItemField{} }, 
+ func(n *Item, e *ItemField) { n.Edges.Fields = append(n.Edges.Fields, e) }); err != nil { + return nil, err + } + } + if query := iq.withLabel; query != nil { + if err := iq.loadLabel(ctx, query, nodes, + func(n *Item) { n.Edges.Label = []*Label{} }, + func(n *Item, e *Label) { n.Edges.Label = append(n.Edges.Label, e) }); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (iq *ItemQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []*Item, init func(*Item), assign func(*Item, *Group)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*Item) + for i := range nodes { + if nodes[i].group_items == nil { + continue + } + fk := *nodes[i].group_items + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(group.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_items" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} +func (iq *ItemQuery) loadLocation(ctx context.Context, query *LocationQuery, nodes []*Item, init func(*Item), assign func(*Item, *Location)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*Item) + for i := range nodes { + if nodes[i].location_items == nil { + continue + } + fk := *nodes[i].location_items + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(location.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "location_items" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} +func (iq *ItemQuery) loadFields(ctx 
context.Context, query *ItemFieldQuery, nodes []*Item, init func(*Item), assign func(*Item, *ItemField)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*Item) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.InValues(item.FieldsColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.item_fields + if fk == nil { + return fmt.Errorf(`foreign-key "item_fields" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "item_fields" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} +func (iq *ItemQuery) loadLabel(ctx context.Context, query *LabelQuery, nodes []*Item, init func(*Item), assign func(*Item, *Label)) error { + edgeIDs := make([]driver.Value, len(nodes)) + byID := make(map[uuid.UUID]*Item) + nids := make(map[uuid.UUID]map[*Item]struct{}) + for i, node := range nodes { + edgeIDs[i] = node.ID + byID[node.ID] = node + if init != nil { + init(node) + } + } + query.Where(func(s *sql.Selector) { + joinT := sql.Table(item.LabelTable) + s.Join(joinT).On(s.C(label.FieldID), joinT.C(item.LabelPrimaryKey[0])) + s.Where(sql.InValues(joinT.C(item.LabelPrimaryKey[1]), edgeIDs...)) + columns := s.SelectedColumns() + s.Select(joinT.C(item.LabelPrimaryKey[1])) + s.AppendSelect(columns...) 
+ s.SetDistinct(false) + }) + if err := query.prepareQuery(ctx); err != nil { + return err + } + neighbors, err := query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { + assign := spec.Assign + values := spec.ScanValues + spec.ScanValues = func(columns []string) ([]interface{}, error) { + values, err := values(columns[1:]) + if err != nil { + return nil, err + } + return append([]interface{}{new(uuid.UUID)}, values...), nil + } + spec.Assign = func(columns []string, values []interface{}) error { + outValue := *values[0].(*uuid.UUID) + inValue := *values[1].(*uuid.UUID) + if nids[inValue] == nil { + nids[inValue] = map[*Item]struct{}{byID[outValue]: struct{}{}} + return assign(columns[1:], values[1:]) + } + nids[inValue][byID[outValue]] = struct{}{} + return nil + } + }) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nids[n.ID] + if !ok { + return fmt.Errorf(`unexpected "label" node returned %v`, n.ID) + } + for kn := range nodes { + assign(kn, n) + } + } + return nil +} + +func (iq *ItemQuery) sqlCount(ctx context.Context) (int, error) { + _spec := iq.querySpec() + _spec.Node.Columns = iq.fields + if len(iq.fields) > 0 { + _spec.Unique = iq.unique != nil && *iq.unique + } + return sqlgraph.CountNodes(ctx, iq.driver, _spec) +} + +func (iq *ItemQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := iq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (iq *ItemQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: item.Table, + Columns: item.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + From: iq.sql, + Unique: true, + } + if unique := iq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := iq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = 
append(_spec.Node.Columns, item.FieldID) + for i := range fields { + if fields[i] != item.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := iq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := iq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := iq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := iq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (iq *ItemQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(iq.driver.Dialect()) + t1 := builder.Table(item.Table) + columns := iq.fields + if len(columns) == 0 { + columns = item.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if iq.sql != nil { + selector = iq.sql + selector.Select(selector.Columns(columns...)...) + } + if iq.unique != nil && *iq.unique { + selector.Distinct() + } + for _, p := range iq.predicates { + p(selector) + } + for _, p := range iq.order { + p(selector) + } + if offset := iq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := iq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// ItemGroupBy is the group-by builder for Item entities. +type ItemGroupBy struct { + config + selector + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (igb *ItemGroupBy) Aggregate(fns ...AggregateFunc) *ItemGroupBy { + igb.fns = append(igb.fns, fns...) 
+ return igb +} + +// Scan applies the group-by query and scans the result into the given value. +func (igb *ItemGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := igb.path(ctx) + if err != nil { + return err + } + igb.sql = query + return igb.sqlScan(ctx, v) +} + +func (igb *ItemGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range igb.fields { + if !item.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := igb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := igb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (igb *ItemGroupBy) sqlQuery() *sql.Selector { + selector := igb.sql.Select() + aggregation := make([]string, 0, len(igb.fns)) + for _, fn := range igb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(igb.fields)+len(igb.fns)) + for _, f := range igb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(igb.fields...)...) +} + +// ItemSelect is the builder for selecting fields of Item entities. +type ItemSelect struct { + *ItemQuery + selector + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. 
+func (is *ItemSelect) Scan(ctx context.Context, v interface{}) error { + if err := is.prepareQuery(ctx); err != nil { + return err + } + is.sql = is.ItemQuery.sqlQuery(ctx) + return is.sqlScan(ctx, v) +} + +func (is *ItemSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := is.sql.Query() + if err := is.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/item_update.go b/backend/ent/item_update.go new file mode 100644 index 0000000..cbbf531 --- /dev/null +++ b/backend/ent/item_update.go @@ -0,0 +1,1935 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ItemUpdate is the builder for updating Item entities. +type ItemUpdate struct { + config + hooks []Hook + mutation *ItemMutation +} + +// Where appends a list predicates to the ItemUpdate builder. +func (iu *ItemUpdate) Where(ps ...predicate.Item) *ItemUpdate { + iu.mutation.Where(ps...) + return iu +} + +// SetUpdatedAt sets the "updated_at" field. +func (iu *ItemUpdate) SetUpdatedAt(t time.Time) *ItemUpdate { + iu.mutation.SetUpdatedAt(t) + return iu +} + +// SetName sets the "name" field. +func (iu *ItemUpdate) SetName(s string) *ItemUpdate { + iu.mutation.SetName(s) + return iu +} + +// SetDescription sets the "description" field. 
+func (iu *ItemUpdate) SetDescription(s string) *ItemUpdate { + iu.mutation.SetDescription(s) + return iu +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableDescription(s *string) *ItemUpdate { + if s != nil { + iu.SetDescription(*s) + } + return iu +} + +// ClearDescription clears the value of the "description" field. +func (iu *ItemUpdate) ClearDescription() *ItemUpdate { + iu.mutation.ClearDescription() + return iu +} + +// SetNotes sets the "notes" field. +func (iu *ItemUpdate) SetNotes(s string) *ItemUpdate { + iu.mutation.SetNotes(s) + return iu +} + +// SetNillableNotes sets the "notes" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableNotes(s *string) *ItemUpdate { + if s != nil { + iu.SetNotes(*s) + } + return iu +} + +// ClearNotes clears the value of the "notes" field. +func (iu *ItemUpdate) ClearNotes() *ItemUpdate { + iu.mutation.ClearNotes() + return iu +} + +// SetSerialNumber sets the "serial_number" field. +func (iu *ItemUpdate) SetSerialNumber(s string) *ItemUpdate { + iu.mutation.SetSerialNumber(s) + return iu +} + +// SetNillableSerialNumber sets the "serial_number" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableSerialNumber(s *string) *ItemUpdate { + if s != nil { + iu.SetSerialNumber(*s) + } + return iu +} + +// ClearSerialNumber clears the value of the "serial_number" field. +func (iu *ItemUpdate) ClearSerialNumber() *ItemUpdate { + iu.mutation.ClearSerialNumber() + return iu +} + +// SetModelNumber sets the "model_number" field. +func (iu *ItemUpdate) SetModelNumber(s string) *ItemUpdate { + iu.mutation.SetModelNumber(s) + return iu +} + +// SetNillableModelNumber sets the "model_number" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableModelNumber(s *string) *ItemUpdate { + if s != nil { + iu.SetModelNumber(*s) + } + return iu +} + +// ClearModelNumber clears the value of the "model_number" field. 
+func (iu *ItemUpdate) ClearModelNumber() *ItemUpdate { + iu.mutation.ClearModelNumber() + return iu +} + +// SetManufacturer sets the "manufacturer" field. +func (iu *ItemUpdate) SetManufacturer(s string) *ItemUpdate { + iu.mutation.SetManufacturer(s) + return iu +} + +// SetNillableManufacturer sets the "manufacturer" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableManufacturer(s *string) *ItemUpdate { + if s != nil { + iu.SetManufacturer(*s) + } + return iu +} + +// ClearManufacturer clears the value of the "manufacturer" field. +func (iu *ItemUpdate) ClearManufacturer() *ItemUpdate { + iu.mutation.ClearManufacturer() + return iu +} + +// SetPurchaseTime sets the "purchase_time" field. +func (iu *ItemUpdate) SetPurchaseTime(t time.Time) *ItemUpdate { + iu.mutation.SetPurchaseTime(t) + return iu +} + +// SetNillablePurchaseTime sets the "purchase_time" field if the given value is not nil. +func (iu *ItemUpdate) SetNillablePurchaseTime(t *time.Time) *ItemUpdate { + if t != nil { + iu.SetPurchaseTime(*t) + } + return iu +} + +// ClearPurchaseTime clears the value of the "purchase_time" field. +func (iu *ItemUpdate) ClearPurchaseTime() *ItemUpdate { + iu.mutation.ClearPurchaseTime() + return iu +} + +// SetPurchaseFrom sets the "purchase_from" field. +func (iu *ItemUpdate) SetPurchaseFrom(s string) *ItemUpdate { + iu.mutation.SetPurchaseFrom(s) + return iu +} + +// SetNillablePurchaseFrom sets the "purchase_from" field if the given value is not nil. +func (iu *ItemUpdate) SetNillablePurchaseFrom(s *string) *ItemUpdate { + if s != nil { + iu.SetPurchaseFrom(*s) + } + return iu +} + +// ClearPurchaseFrom clears the value of the "purchase_from" field. +func (iu *ItemUpdate) ClearPurchaseFrom() *ItemUpdate { + iu.mutation.ClearPurchaseFrom() + return iu +} + +// SetPurchasePrice sets the "purchase_price" field. 
+func (iu *ItemUpdate) SetPurchasePrice(f float64) *ItemUpdate { + iu.mutation.ResetPurchasePrice() + iu.mutation.SetPurchasePrice(f) + return iu +} + +// SetNillablePurchasePrice sets the "purchase_price" field if the given value is not nil. +func (iu *ItemUpdate) SetNillablePurchasePrice(f *float64) *ItemUpdate { + if f != nil { + iu.SetPurchasePrice(*f) + } + return iu +} + +// AddPurchasePrice adds f to the "purchase_price" field. +func (iu *ItemUpdate) AddPurchasePrice(f float64) *ItemUpdate { + iu.mutation.AddPurchasePrice(f) + return iu +} + +// SetPurchaseReceiptID sets the "purchase_receipt_id" field. +func (iu *ItemUpdate) SetPurchaseReceiptID(u uuid.UUID) *ItemUpdate { + iu.mutation.SetPurchaseReceiptID(u) + return iu +} + +// SetNillablePurchaseReceiptID sets the "purchase_receipt_id" field if the given value is not nil. +func (iu *ItemUpdate) SetNillablePurchaseReceiptID(u *uuid.UUID) *ItemUpdate { + if u != nil { + iu.SetPurchaseReceiptID(*u) + } + return iu +} + +// ClearPurchaseReceiptID clears the value of the "purchase_receipt_id" field. +func (iu *ItemUpdate) ClearPurchaseReceiptID() *ItemUpdate { + iu.mutation.ClearPurchaseReceiptID() + return iu +} + +// SetSoldTime sets the "sold_time" field. +func (iu *ItemUpdate) SetSoldTime(t time.Time) *ItemUpdate { + iu.mutation.SetSoldTime(t) + return iu +} + +// SetNillableSoldTime sets the "sold_time" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableSoldTime(t *time.Time) *ItemUpdate { + if t != nil { + iu.SetSoldTime(*t) + } + return iu +} + +// ClearSoldTime clears the value of the "sold_time" field. +func (iu *ItemUpdate) ClearSoldTime() *ItemUpdate { + iu.mutation.ClearSoldTime() + return iu +} + +// SetSoldTo sets the "sold_to" field. +func (iu *ItemUpdate) SetSoldTo(s string) *ItemUpdate { + iu.mutation.SetSoldTo(s) + return iu +} + +// SetNillableSoldTo sets the "sold_to" field if the given value is not nil. 
+func (iu *ItemUpdate) SetNillableSoldTo(s *string) *ItemUpdate { + if s != nil { + iu.SetSoldTo(*s) + } + return iu +} + +// ClearSoldTo clears the value of the "sold_to" field. +func (iu *ItemUpdate) ClearSoldTo() *ItemUpdate { + iu.mutation.ClearSoldTo() + return iu +} + +// SetSoldPrice sets the "sold_price" field. +func (iu *ItemUpdate) SetSoldPrice(f float64) *ItemUpdate { + iu.mutation.ResetSoldPrice() + iu.mutation.SetSoldPrice(f) + return iu +} + +// SetNillableSoldPrice sets the "sold_price" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableSoldPrice(f *float64) *ItemUpdate { + if f != nil { + iu.SetSoldPrice(*f) + } + return iu +} + +// AddSoldPrice adds f to the "sold_price" field. +func (iu *ItemUpdate) AddSoldPrice(f float64) *ItemUpdate { + iu.mutation.AddSoldPrice(f) + return iu +} + +// SetSoldReceiptID sets the "sold_receipt_id" field. +func (iu *ItemUpdate) SetSoldReceiptID(u uuid.UUID) *ItemUpdate { + iu.mutation.SetSoldReceiptID(u) + return iu +} + +// SetNillableSoldReceiptID sets the "sold_receipt_id" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableSoldReceiptID(u *uuid.UUID) *ItemUpdate { + if u != nil { + iu.SetSoldReceiptID(*u) + } + return iu +} + +// ClearSoldReceiptID clears the value of the "sold_receipt_id" field. +func (iu *ItemUpdate) ClearSoldReceiptID() *ItemUpdate { + iu.mutation.ClearSoldReceiptID() + return iu +} + +// SetSoldNotes sets the "sold_notes" field. +func (iu *ItemUpdate) SetSoldNotes(s string) *ItemUpdate { + iu.mutation.SetSoldNotes(s) + return iu +} + +// SetNillableSoldNotes sets the "sold_notes" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableSoldNotes(s *string) *ItemUpdate { + if s != nil { + iu.SetSoldNotes(*s) + } + return iu +} + +// ClearSoldNotes clears the value of the "sold_notes" field. 
+func (iu *ItemUpdate) ClearSoldNotes() *ItemUpdate { + iu.mutation.ClearSoldNotes() + return iu +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (iu *ItemUpdate) SetGroupID(id uuid.UUID) *ItemUpdate { + iu.mutation.SetGroupID(id) + return iu +} + +// SetGroup sets the "group" edge to the Group entity. +func (iu *ItemUpdate) SetGroup(g *Group) *ItemUpdate { + return iu.SetGroupID(g.ID) +} + +// SetLocationID sets the "location" edge to the Location entity by ID. +func (iu *ItemUpdate) SetLocationID(id uuid.UUID) *ItemUpdate { + iu.mutation.SetLocationID(id) + return iu +} + +// SetNillableLocationID sets the "location" edge to the Location entity by ID if the given value is not nil. +func (iu *ItemUpdate) SetNillableLocationID(id *uuid.UUID) *ItemUpdate { + if id != nil { + iu = iu.SetLocationID(*id) + } + return iu +} + +// SetLocation sets the "location" edge to the Location entity. +func (iu *ItemUpdate) SetLocation(l *Location) *ItemUpdate { + return iu.SetLocationID(l.ID) +} + +// AddFieldIDs adds the "fields" edge to the ItemField entity by IDs. +func (iu *ItemUpdate) AddFieldIDs(ids ...uuid.UUID) *ItemUpdate { + iu.mutation.AddFieldIDs(ids...) + return iu +} + +// AddFields adds the "fields" edges to the ItemField entity. +func (iu *ItemUpdate) AddFields(i ...*ItemField) *ItemUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return iu.AddFieldIDs(ids...) +} + +// AddLabelIDs adds the "label" edge to the Label entity by IDs. +func (iu *ItemUpdate) AddLabelIDs(ids ...uuid.UUID) *ItemUpdate { + iu.mutation.AddLabelIDs(ids...) + return iu +} + +// AddLabel adds the "label" edges to the Label entity. +func (iu *ItemUpdate) AddLabel(l ...*Label) *ItemUpdate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return iu.AddLabelIDs(ids...) +} + +// Mutation returns the ItemMutation object of the builder. 
+func (iu *ItemUpdate) Mutation() *ItemMutation { + return iu.mutation +} + +// ClearGroup clears the "group" edge to the Group entity. +func (iu *ItemUpdate) ClearGroup() *ItemUpdate { + iu.mutation.ClearGroup() + return iu +} + +// ClearLocation clears the "location" edge to the Location entity. +func (iu *ItemUpdate) ClearLocation() *ItemUpdate { + iu.mutation.ClearLocation() + return iu +} + +// ClearFields clears all "fields" edges to the ItemField entity. +func (iu *ItemUpdate) ClearFields() *ItemUpdate { + iu.mutation.ClearFields() + return iu +} + +// RemoveFieldIDs removes the "fields" edge to ItemField entities by IDs. +func (iu *ItemUpdate) RemoveFieldIDs(ids ...uuid.UUID) *ItemUpdate { + iu.mutation.RemoveFieldIDs(ids...) + return iu +} + +// RemoveFields removes "fields" edges to ItemField entities. +func (iu *ItemUpdate) RemoveFields(i ...*ItemField) *ItemUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return iu.RemoveFieldIDs(ids...) +} + +// ClearLabel clears all "label" edges to the Label entity. +func (iu *ItemUpdate) ClearLabel() *ItemUpdate { + iu.mutation.ClearLabel() + return iu +} + +// RemoveLabelIDs removes the "label" edge to Label entities by IDs. +func (iu *ItemUpdate) RemoveLabelIDs(ids ...uuid.UUID) *ItemUpdate { + iu.mutation.RemoveLabelIDs(ids...) + return iu +} + +// RemoveLabel removes "label" edges to Label entities. +func (iu *ItemUpdate) RemoveLabel(l ...*Label) *ItemUpdate { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return iu.RemoveLabelIDs(ids...) +} + +// Save executes the query and returns the number of nodes affected by the update operation. 
+func (iu *ItemUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + iu.defaults() + if len(iu.hooks) == 0 { + if err = iu.check(); err != nil { + return 0, err + } + affected, err = iu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = iu.check(); err != nil { + return 0, err + } + iu.mutation = mutation + affected, err = iu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(iu.hooks) - 1; i >= 0; i-- { + if iu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = iu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, iu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (iu *ItemUpdate) SaveX(ctx context.Context) int { + affected, err := iu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (iu *ItemUpdate) Exec(ctx context.Context) error { + _, err := iu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (iu *ItemUpdate) ExecX(ctx context.Context) { + if err := iu.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (iu *ItemUpdate) defaults() { + if _, ok := iu.mutation.UpdatedAt(); !ok { + v := item.UpdateDefaultUpdatedAt() + iu.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (iu *ItemUpdate) check() error { + if v, ok := iu.mutation.Name(); ok { + if err := item.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Item.name": %w`, err)} + } + } + if v, ok := iu.mutation.Description(); ok { + if err := item.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)} + } + } + if v, ok := iu.mutation.Notes(); ok { + if err := item.NotesValidator(v); err != nil { + return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)} + } + } + if v, ok := iu.mutation.SerialNumber(); ok { + if err := item.SerialNumberValidator(v); err != nil { + return &ValidationError{Name: "serial_number", err: fmt.Errorf(`ent: validator failed for field "Item.serial_number": %w`, err)} + } + } + if v, ok := iu.mutation.ModelNumber(); ok { + if err := item.ModelNumberValidator(v); err != nil { + return &ValidationError{Name: "model_number", err: fmt.Errorf(`ent: validator failed for field "Item.model_number": %w`, err)} + } + } + if v, ok := iu.mutation.Manufacturer(); ok { + if err := item.ManufacturerValidator(v); err != nil { + return &ValidationError{Name: "manufacturer", err: fmt.Errorf(`ent: validator failed for field "Item.manufacturer": %w`, err)} + } + } + if v, ok := iu.mutation.SoldNotes(); ok { + if err := item.SoldNotesValidator(v); err != nil { + return &ValidationError{Name: "sold_notes", err: fmt.Errorf(`ent: validator failed for field "Item.sold_notes": %w`, err)} + } + } + if _, ok := iu.mutation.GroupID(); iu.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "Item.group"`) + } + return nil +} + +func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: item.Table, + Columns: item.Columns, + ID: 
&sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + if ps := iu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := iu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldUpdatedAt, + }) + } + if value, ok := iu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldName, + }) + } + if value, ok := iu.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldDescription, + }) + } + if iu.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldDescription, + }) + } + if value, ok := iu.mutation.Notes(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldNotes, + }) + } + if iu.mutation.NotesCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldNotes, + }) + } + if value, ok := iu.mutation.SerialNumber(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSerialNumber, + }) + } + if iu.mutation.SerialNumberCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldSerialNumber, + }) + } + if value, ok := iu.mutation.ModelNumber(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldModelNumber, + }) + } + if iu.mutation.ModelNumberCleared() { + _spec.Fields.Clear 
= append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldModelNumber, + }) + } + if value, ok := iu.mutation.Manufacturer(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldManufacturer, + }) + } + if iu.mutation.ManufacturerCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldManufacturer, + }) + } + if value, ok := iu.mutation.PurchaseTime(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldPurchaseTime, + }) + } + if iu.mutation.PurchaseTimeCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Column: item.FieldPurchaseTime, + }) + } + if value, ok := iu.mutation.PurchaseFrom(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldPurchaseFrom, + }) + } + if iu.mutation.PurchaseFromCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldPurchaseFrom, + }) + } + if value, ok := iu.mutation.PurchasePrice(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldPurchasePrice, + }) + } + if value, ok := iu.mutation.AddedPurchasePrice(); ok { + _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldPurchasePrice, + }) + } + if value, ok := iu.mutation.PurchaseReceiptID(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Value: value, + Column: item.FieldPurchaseReceiptID, + }) + } + if iu.mutation.PurchaseReceiptIDCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, 
&sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldPurchaseReceiptID, + }) + } + if value, ok := iu.mutation.SoldTime(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldSoldTime, + }) + } + if iu.mutation.SoldTimeCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Column: item.FieldSoldTime, + }) + } + if value, ok := iu.mutation.SoldTo(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSoldTo, + }) + } + if iu.mutation.SoldToCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldSoldTo, + }) + } + if value, ok := iu.mutation.SoldPrice(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldSoldPrice, + }) + } + if value, ok := iu.mutation.AddedSoldPrice(); ok { + _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldSoldPrice, + }) + } + if value, ok := iu.mutation.SoldReceiptID(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Value: value, + Column: item.FieldSoldReceiptID, + }) + } + if iu.mutation.SoldReceiptIDCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldSoldReceiptID, + }) + } + if value, ok := iu.mutation.SoldNotes(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSoldNotes, + }) + } + if iu.mutation.SoldNotesCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldSoldNotes, + }) + } + if 
iu.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.GroupTable, + Columns: []string{item.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iu.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.GroupTable, + Columns: []string{item.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if iu.mutation.LocationCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.LocationTable, + Columns: []string{item.LocationColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iu.mutation.LocationIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.LocationTable, + Columns: []string{item.LocationColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if iu.mutation.FieldsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } 
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iu.mutation.RemovedFieldsIDs(); len(nodes) > 0 && !iu.mutation.FieldsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iu.mutation.FieldsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if iu.mutation.LabelCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iu.mutation.RemovedLabelIDs(); len(nodes) > 0 && !iu.mutation.LabelCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iu.mutation.LabelIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + 
Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, iu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{item.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + return n, nil +} + +// ItemUpdateOne is the builder for updating a single Item entity. +type ItemUpdateOne struct { + config + fields []string + hooks []Hook + mutation *ItemMutation +} + +// SetUpdatedAt sets the "updated_at" field. +func (iuo *ItemUpdateOne) SetUpdatedAt(t time.Time) *ItemUpdateOne { + iuo.mutation.SetUpdatedAt(t) + return iuo +} + +// SetName sets the "name" field. +func (iuo *ItemUpdateOne) SetName(s string) *ItemUpdateOne { + iuo.mutation.SetName(s) + return iuo +} + +// SetDescription sets the "description" field. +func (iuo *ItemUpdateOne) SetDescription(s string) *ItemUpdateOne { + iuo.mutation.SetDescription(s) + return iuo +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableDescription(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetDescription(*s) + } + return iuo +} + +// ClearDescription clears the value of the "description" field. +func (iuo *ItemUpdateOne) ClearDescription() *ItemUpdateOne { + iuo.mutation.ClearDescription() + return iuo +} + +// SetNotes sets the "notes" field. +func (iuo *ItemUpdateOne) SetNotes(s string) *ItemUpdateOne { + iuo.mutation.SetNotes(s) + return iuo +} + +// SetNillableNotes sets the "notes" field if the given value is not nil. 
+func (iuo *ItemUpdateOne) SetNillableNotes(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetNotes(*s) + } + return iuo +} + +// ClearNotes clears the value of the "notes" field. +func (iuo *ItemUpdateOne) ClearNotes() *ItemUpdateOne { + iuo.mutation.ClearNotes() + return iuo +} + +// SetSerialNumber sets the "serial_number" field. +func (iuo *ItemUpdateOne) SetSerialNumber(s string) *ItemUpdateOne { + iuo.mutation.SetSerialNumber(s) + return iuo +} + +// SetNillableSerialNumber sets the "serial_number" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableSerialNumber(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetSerialNumber(*s) + } + return iuo +} + +// ClearSerialNumber clears the value of the "serial_number" field. +func (iuo *ItemUpdateOne) ClearSerialNumber() *ItemUpdateOne { + iuo.mutation.ClearSerialNumber() + return iuo +} + +// SetModelNumber sets the "model_number" field. +func (iuo *ItemUpdateOne) SetModelNumber(s string) *ItemUpdateOne { + iuo.mutation.SetModelNumber(s) + return iuo +} + +// SetNillableModelNumber sets the "model_number" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableModelNumber(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetModelNumber(*s) + } + return iuo +} + +// ClearModelNumber clears the value of the "model_number" field. +func (iuo *ItemUpdateOne) ClearModelNumber() *ItemUpdateOne { + iuo.mutation.ClearModelNumber() + return iuo +} + +// SetManufacturer sets the "manufacturer" field. +func (iuo *ItemUpdateOne) SetManufacturer(s string) *ItemUpdateOne { + iuo.mutation.SetManufacturer(s) + return iuo +} + +// SetNillableManufacturer sets the "manufacturer" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableManufacturer(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetManufacturer(*s) + } + return iuo +} + +// ClearManufacturer clears the value of the "manufacturer" field. 
+func (iuo *ItemUpdateOne) ClearManufacturer() *ItemUpdateOne { + iuo.mutation.ClearManufacturer() + return iuo +} + +// SetPurchaseTime sets the "purchase_time" field. +func (iuo *ItemUpdateOne) SetPurchaseTime(t time.Time) *ItemUpdateOne { + iuo.mutation.SetPurchaseTime(t) + return iuo +} + +// SetNillablePurchaseTime sets the "purchase_time" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillablePurchaseTime(t *time.Time) *ItemUpdateOne { + if t != nil { + iuo.SetPurchaseTime(*t) + } + return iuo +} + +// ClearPurchaseTime clears the value of the "purchase_time" field. +func (iuo *ItemUpdateOne) ClearPurchaseTime() *ItemUpdateOne { + iuo.mutation.ClearPurchaseTime() + return iuo +} + +// SetPurchaseFrom sets the "purchase_from" field. +func (iuo *ItemUpdateOne) SetPurchaseFrom(s string) *ItemUpdateOne { + iuo.mutation.SetPurchaseFrom(s) + return iuo +} + +// SetNillablePurchaseFrom sets the "purchase_from" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillablePurchaseFrom(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetPurchaseFrom(*s) + } + return iuo +} + +// ClearPurchaseFrom clears the value of the "purchase_from" field. +func (iuo *ItemUpdateOne) ClearPurchaseFrom() *ItemUpdateOne { + iuo.mutation.ClearPurchaseFrom() + return iuo +} + +// SetPurchasePrice sets the "purchase_price" field. +func (iuo *ItemUpdateOne) SetPurchasePrice(f float64) *ItemUpdateOne { + iuo.mutation.ResetPurchasePrice() + iuo.mutation.SetPurchasePrice(f) + return iuo +} + +// SetNillablePurchasePrice sets the "purchase_price" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillablePurchasePrice(f *float64) *ItemUpdateOne { + if f != nil { + iuo.SetPurchasePrice(*f) + } + return iuo +} + +// AddPurchasePrice adds f to the "purchase_price" field. 
+func (iuo *ItemUpdateOne) AddPurchasePrice(f float64) *ItemUpdateOne { + iuo.mutation.AddPurchasePrice(f) + return iuo +} + +// SetPurchaseReceiptID sets the "purchase_receipt_id" field. +func (iuo *ItemUpdateOne) SetPurchaseReceiptID(u uuid.UUID) *ItemUpdateOne { + iuo.mutation.SetPurchaseReceiptID(u) + return iuo +} + +// SetNillablePurchaseReceiptID sets the "purchase_receipt_id" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillablePurchaseReceiptID(u *uuid.UUID) *ItemUpdateOne { + if u != nil { + iuo.SetPurchaseReceiptID(*u) + } + return iuo +} + +// ClearPurchaseReceiptID clears the value of the "purchase_receipt_id" field. +func (iuo *ItemUpdateOne) ClearPurchaseReceiptID() *ItemUpdateOne { + iuo.mutation.ClearPurchaseReceiptID() + return iuo +} + +// SetSoldTime sets the "sold_time" field. +func (iuo *ItemUpdateOne) SetSoldTime(t time.Time) *ItemUpdateOne { + iuo.mutation.SetSoldTime(t) + return iuo +} + +// SetNillableSoldTime sets the "sold_time" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableSoldTime(t *time.Time) *ItemUpdateOne { + if t != nil { + iuo.SetSoldTime(*t) + } + return iuo +} + +// ClearSoldTime clears the value of the "sold_time" field. +func (iuo *ItemUpdateOne) ClearSoldTime() *ItemUpdateOne { + iuo.mutation.ClearSoldTime() + return iuo +} + +// SetSoldTo sets the "sold_to" field. +func (iuo *ItemUpdateOne) SetSoldTo(s string) *ItemUpdateOne { + iuo.mutation.SetSoldTo(s) + return iuo +} + +// SetNillableSoldTo sets the "sold_to" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableSoldTo(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetSoldTo(*s) + } + return iuo +} + +// ClearSoldTo clears the value of the "sold_to" field. +func (iuo *ItemUpdateOne) ClearSoldTo() *ItemUpdateOne { + iuo.mutation.ClearSoldTo() + return iuo +} + +// SetSoldPrice sets the "sold_price" field. 
+func (iuo *ItemUpdateOne) SetSoldPrice(f float64) *ItemUpdateOne { + iuo.mutation.ResetSoldPrice() + iuo.mutation.SetSoldPrice(f) + return iuo +} + +// SetNillableSoldPrice sets the "sold_price" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableSoldPrice(f *float64) *ItemUpdateOne { + if f != nil { + iuo.SetSoldPrice(*f) + } + return iuo +} + +// AddSoldPrice adds f to the "sold_price" field. +func (iuo *ItemUpdateOne) AddSoldPrice(f float64) *ItemUpdateOne { + iuo.mutation.AddSoldPrice(f) + return iuo +} + +// SetSoldReceiptID sets the "sold_receipt_id" field. +func (iuo *ItemUpdateOne) SetSoldReceiptID(u uuid.UUID) *ItemUpdateOne { + iuo.mutation.SetSoldReceiptID(u) + return iuo +} + +// SetNillableSoldReceiptID sets the "sold_receipt_id" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableSoldReceiptID(u *uuid.UUID) *ItemUpdateOne { + if u != nil { + iuo.SetSoldReceiptID(*u) + } + return iuo +} + +// ClearSoldReceiptID clears the value of the "sold_receipt_id" field. +func (iuo *ItemUpdateOne) ClearSoldReceiptID() *ItemUpdateOne { + iuo.mutation.ClearSoldReceiptID() + return iuo +} + +// SetSoldNotes sets the "sold_notes" field. +func (iuo *ItemUpdateOne) SetSoldNotes(s string) *ItemUpdateOne { + iuo.mutation.SetSoldNotes(s) + return iuo +} + +// SetNillableSoldNotes sets the "sold_notes" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableSoldNotes(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetSoldNotes(*s) + } + return iuo +} + +// ClearSoldNotes clears the value of the "sold_notes" field. +func (iuo *ItemUpdateOne) ClearSoldNotes() *ItemUpdateOne { + iuo.mutation.ClearSoldNotes() + return iuo +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (iuo *ItemUpdateOne) SetGroupID(id uuid.UUID) *ItemUpdateOne { + iuo.mutation.SetGroupID(id) + return iuo +} + +// SetGroup sets the "group" edge to the Group entity. 
+func (iuo *ItemUpdateOne) SetGroup(g *Group) *ItemUpdateOne { + return iuo.SetGroupID(g.ID) +} + +// SetLocationID sets the "location" edge to the Location entity by ID. +func (iuo *ItemUpdateOne) SetLocationID(id uuid.UUID) *ItemUpdateOne { + iuo.mutation.SetLocationID(id) + return iuo +} + +// SetNillableLocationID sets the "location" edge to the Location entity by ID if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableLocationID(id *uuid.UUID) *ItemUpdateOne { + if id != nil { + iuo = iuo.SetLocationID(*id) + } + return iuo +} + +// SetLocation sets the "location" edge to the Location entity. +func (iuo *ItemUpdateOne) SetLocation(l *Location) *ItemUpdateOne { + return iuo.SetLocationID(l.ID) +} + +// AddFieldIDs adds the "fields" edge to the ItemField entity by IDs. +func (iuo *ItemUpdateOne) AddFieldIDs(ids ...uuid.UUID) *ItemUpdateOne { + iuo.mutation.AddFieldIDs(ids...) + return iuo +} + +// AddFields adds the "fields" edges to the ItemField entity. +func (iuo *ItemUpdateOne) AddFields(i ...*ItemField) *ItemUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return iuo.AddFieldIDs(ids...) +} + +// AddLabelIDs adds the "label" edge to the Label entity by IDs. +func (iuo *ItemUpdateOne) AddLabelIDs(ids ...uuid.UUID) *ItemUpdateOne { + iuo.mutation.AddLabelIDs(ids...) + return iuo +} + +// AddLabel adds the "label" edges to the Label entity. +func (iuo *ItemUpdateOne) AddLabel(l ...*Label) *ItemUpdateOne { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return iuo.AddLabelIDs(ids...) +} + +// Mutation returns the ItemMutation object of the builder. +func (iuo *ItemUpdateOne) Mutation() *ItemMutation { + return iuo.mutation +} + +// ClearGroup clears the "group" edge to the Group entity. +func (iuo *ItemUpdateOne) ClearGroup() *ItemUpdateOne { + iuo.mutation.ClearGroup() + return iuo +} + +// ClearLocation clears the "location" edge to the Location entity. 
+func (iuo *ItemUpdateOne) ClearLocation() *ItemUpdateOne { + iuo.mutation.ClearLocation() + return iuo +} + +// ClearFields clears all "fields" edges to the ItemField entity. +func (iuo *ItemUpdateOne) ClearFields() *ItemUpdateOne { + iuo.mutation.ClearFields() + return iuo +} + +// RemoveFieldIDs removes the "fields" edge to ItemField entities by IDs. +func (iuo *ItemUpdateOne) RemoveFieldIDs(ids ...uuid.UUID) *ItemUpdateOne { + iuo.mutation.RemoveFieldIDs(ids...) + return iuo +} + +// RemoveFields removes "fields" edges to ItemField entities. +func (iuo *ItemUpdateOne) RemoveFields(i ...*ItemField) *ItemUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return iuo.RemoveFieldIDs(ids...) +} + +// ClearLabel clears all "label" edges to the Label entity. +func (iuo *ItemUpdateOne) ClearLabel() *ItemUpdateOne { + iuo.mutation.ClearLabel() + return iuo +} + +// RemoveLabelIDs removes the "label" edge to Label entities by IDs. +func (iuo *ItemUpdateOne) RemoveLabelIDs(ids ...uuid.UUID) *ItemUpdateOne { + iuo.mutation.RemoveLabelIDs(ids...) + return iuo +} + +// RemoveLabel removes "label" edges to Label entities. +func (iuo *ItemUpdateOne) RemoveLabel(l ...*Label) *ItemUpdateOne { + ids := make([]uuid.UUID, len(l)) + for i := range l { + ids[i] = l[i].ID + } + return iuo.RemoveLabelIDs(ids...) +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (iuo *ItemUpdateOne) Select(field string, fields ...string) *ItemUpdateOne { + iuo.fields = append([]string{field}, fields...) + return iuo +} + +// Save executes the query and returns the updated Item entity. 
+func (iuo *ItemUpdateOne) Save(ctx context.Context) (*Item, error) { + var ( + err error + node *Item + ) + iuo.defaults() + if len(iuo.hooks) == 0 { + if err = iuo.check(); err != nil { + return nil, err + } + node, err = iuo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = iuo.check(); err != nil { + return nil, err + } + iuo.mutation = mutation + node, err = iuo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(iuo.hooks) - 1; i >= 0; i-- { + if iuo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = iuo.hooks[i](mut) + } + v, err := mut.Mutate(ctx, iuo.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Item) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from ItemMutation", v) + } + node = nv + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. +func (iuo *ItemUpdateOne) SaveX(ctx context.Context) *Item { + node, err := iuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (iuo *ItemUpdateOne) Exec(ctx context.Context) error { + _, err := iuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (iuo *ItemUpdateOne) ExecX(ctx context.Context) { + if err := iuo.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (iuo *ItemUpdateOne) defaults() { + if _, ok := iuo.mutation.UpdatedAt(); !ok { + v := item.UpdateDefaultUpdatedAt() + iuo.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (iuo *ItemUpdateOne) check() error { + if v, ok := iuo.mutation.Name(); ok { + if err := item.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Item.name": %w`, err)} + } + } + if v, ok := iuo.mutation.Description(); ok { + if err := item.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)} + } + } + if v, ok := iuo.mutation.Notes(); ok { + if err := item.NotesValidator(v); err != nil { + return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)} + } + } + if v, ok := iuo.mutation.SerialNumber(); ok { + if err := item.SerialNumberValidator(v); err != nil { + return &ValidationError{Name: "serial_number", err: fmt.Errorf(`ent: validator failed for field "Item.serial_number": %w`, err)} + } + } + if v, ok := iuo.mutation.ModelNumber(); ok { + if err := item.ModelNumberValidator(v); err != nil { + return &ValidationError{Name: "model_number", err: fmt.Errorf(`ent: validator failed for field "Item.model_number": %w`, err)} + } + } + if v, ok := iuo.mutation.Manufacturer(); ok { + if err := item.ManufacturerValidator(v); err != nil { + return &ValidationError{Name: "manufacturer", err: fmt.Errorf(`ent: validator failed for field "Item.manufacturer": %w`, err)} + } + } + if v, ok := iuo.mutation.SoldNotes(); ok { + if err := item.SoldNotesValidator(v); err != nil { + return &ValidationError{Name: "sold_notes", err: fmt.Errorf(`ent: validator failed for field "Item.sold_notes": %w`, err)} + } + } + if _, ok := iuo.mutation.GroupID(); iuo.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "Item.group"`) + } + return nil +} + +func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: item.Table, + 
Columns: item.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + id, ok := iuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Item.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := iuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, item.FieldID) + for _, f := range fields { + if !item.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != item.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := iuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := iuo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldUpdatedAt, + }) + } + if value, ok := iuo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldName, + }) + } + if value, ok := iuo.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldDescription, + }) + } + if iuo.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldDescription, + }) + } + if value, ok := iuo.mutation.Notes(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldNotes, + }) + } + if iuo.mutation.NotesCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldNotes, + }) + } + if value, ok := 
iuo.mutation.SerialNumber(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSerialNumber, + }) + } + if iuo.mutation.SerialNumberCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldSerialNumber, + }) + } + if value, ok := iuo.mutation.ModelNumber(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldModelNumber, + }) + } + if iuo.mutation.ModelNumberCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldModelNumber, + }) + } + if value, ok := iuo.mutation.Manufacturer(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldManufacturer, + }) + } + if iuo.mutation.ManufacturerCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldManufacturer, + }) + } + if value, ok := iuo.mutation.PurchaseTime(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldPurchaseTime, + }) + } + if iuo.mutation.PurchaseTimeCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Column: item.FieldPurchaseTime, + }) + } + if value, ok := iuo.mutation.PurchaseFrom(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldPurchaseFrom, + }) + } + if iuo.mutation.PurchaseFromCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldPurchaseFrom, + }) + } + if value, ok := iuo.mutation.PurchasePrice(); ok { + _spec.Fields.Set = 
append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldPurchasePrice, + }) + } + if value, ok := iuo.mutation.AddedPurchasePrice(); ok { + _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldPurchasePrice, + }) + } + if value, ok := iuo.mutation.PurchaseReceiptID(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Value: value, + Column: item.FieldPurchaseReceiptID, + }) + } + if iuo.mutation.PurchaseReceiptIDCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldPurchaseReceiptID, + }) + } + if value, ok := iuo.mutation.SoldTime(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: item.FieldSoldTime, + }) + } + if iuo.mutation.SoldTimeCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Column: item.FieldSoldTime, + }) + } + if value, ok := iuo.mutation.SoldTo(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSoldTo, + }) + } + if iuo.mutation.SoldToCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldSoldTo, + }) + } + if value, ok := iuo.mutation.SoldPrice(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldSoldPrice, + }) + } + if value, ok := iuo.mutation.AddedSoldPrice(); ok { + _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ + Type: field.TypeFloat64, + Value: value, + Column: item.FieldSoldPrice, + }) + } + if value, ok := iuo.mutation.SoldReceiptID(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + 
Type: field.TypeUUID, + Value: value, + Column: item.FieldSoldReceiptID, + }) + } + if iuo.mutation.SoldReceiptIDCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldSoldReceiptID, + }) + } + if value, ok := iuo.mutation.SoldNotes(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: item.FieldSoldNotes, + }) + } + if iuo.mutation.SoldNotesCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: item.FieldSoldNotes, + }) + } + if iuo.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.GroupTable, + Columns: []string{item.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iuo.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.GroupTable, + Columns: []string{item.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if iuo.mutation.LocationCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: item.LocationTable, + Columns: []string{item.LocationColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iuo.mutation.LocationIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: 
item.LocationTable, + Columns: []string{item.LocationColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if iuo.mutation.FieldsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iuo.mutation.RemovedFieldsIDs(); len(nodes) > 0 && !iuo.mutation.FieldsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iuo.mutation.FieldsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: item.FieldsTable, + Columns: []string{item.FieldsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if iuo.mutation.LabelCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: 
label.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iuo.mutation.RemovedLabelIDs(); len(nodes) > 0 && !iuo.mutation.LabelCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := iuo.mutation.LabelIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: true, + Table: item.LabelTable, + Columns: item.LabelPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &Item{config: iuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, iuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{item.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/ent/itemfield.go b/backend/ent/itemfield.go new file mode 100644 index 0000000..9dfbf65 --- /dev/null +++ b/backend/ent/itemfield.go @@ -0,0 +1,236 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" +) + +// ItemField is the model entity for the ItemField schema. 
// NOTE(review): ent-generated code ("DO NOT EDIT" header above) — manual
// changes are overwritten by the next `ent generate`; fix the ent schema
// (backend/ent/schema) and regenerate instead.
type ItemField struct {
	config `json:"-"`
	// ID of the ent.
	ID uuid.UUID `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// Name holds the value of the "name" field.
	Name string `json:"name,omitempty"`
	// Description holds the value of the "description" field.
	Description string `json:"description,omitempty"`
	// Type holds the value of the "type" field.
	Type itemfield.Type `json:"type,omitempty"`
	// TextValue holds the value of the "text_value" field.
	TextValue string `json:"text_value,omitempty"`
	// NumberValue holds the value of the "number_value" field.
	NumberValue int `json:"number_value,omitempty"`
	// BooleanValue holds the value of the "boolean_value" field.
	BooleanValue bool `json:"boolean_value,omitempty"`
	// TimeValue holds the value of the "time_value" field.
	TimeValue time.Time `json:"time_value,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the ItemFieldQuery when eager-loading is set.
	Edges ItemFieldEdges `json:"edges"`
	// item_fields is the nullable foreign-key column for the "item" edge;
	// nil when the column is NULL (it is scanned through a sql.NullScanner
	// in scanValues below).
	item_fields *uuid.UUID
}

// ItemFieldEdges holds the relations/edges for other nodes in the graph.
type ItemFieldEdges struct {
	// Item holds the value of the item edge.
	Item *Item `json:"item,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	loadedTypes [1]bool
}

// ItemOrErr returns the Item value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e ItemFieldEdges) ItemOrErr() (*Item, error) {
	if e.loadedTypes[0] {
		if e.Item == nil {
			// Edge was loaded but was not found.
			return nil, &NotFoundError{label: item.Label}
		}
		return e.Item, nil
	}
	return nil, &NotLoadedError{edge: "item"}
}

// scanValues returns the types for scanning values from sql.Rows.
// The returned slice is positionally aligned with columns; the same
// positions are consumed by assignValues after the row is scanned.
func (*ItemField) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case itemfield.FieldBooleanValue:
			values[i] = new(sql.NullBool)
		case itemfield.FieldNumberValue:
			values[i] = new(sql.NullInt64)
		case itemfield.FieldName, itemfield.FieldDescription, itemfield.FieldType, itemfield.FieldTextValue:
			values[i] = new(sql.NullString)
		case itemfield.FieldCreatedAt, itemfield.FieldUpdatedAt, itemfield.FieldTimeValue:
			values[i] = new(sql.NullTime)
		case itemfield.FieldID:
			values[i] = new(uuid.UUID)
		case itemfield.ForeignKeys[0]: // item_fields
			values[i] = &sql.NullScanner{S: new(uuid.UUID)}
		default:
			return nil, fmt.Errorf("unexpected column %q for type ItemField", columns[i])
		}
	}
	return values, nil
}

// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the ItemField fields.
func (_if *ItemField) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case itemfield.FieldID:
			if value, ok := values[i].(*uuid.UUID); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value != nil {
				_if.ID = *value
			}
		case itemfield.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				_if.CreatedAt = value.Time
			}
		case itemfield.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				_if.UpdatedAt = value.Time
			}
		case itemfield.FieldName:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field name", values[i])
			} else if value.Valid {
				_if.Name = value.String
			}
		case itemfield.FieldDescription:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field description", values[i])
			} else if value.Valid {
				_if.Description = value.String
			}
		case itemfield.FieldType:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field type", values[i])
			} else if value.Valid {
				// The enum is stored as a plain string column.
				_if.Type = itemfield.Type(value.String)
			}
		case itemfield.FieldTextValue:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field text_value", values[i])
			} else if value.Valid {
				_if.TextValue = value.String
			}
		case itemfield.FieldNumberValue:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field number_value", values[i])
			} else if value.Valid {
				_if.NumberValue = int(value.Int64)
			}
		case itemfield.FieldBooleanValue:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field boolean_value", values[i])
			} else if value.Valid {
				_if.BooleanValue = value.Bool
			}
		case itemfield.FieldTimeValue:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field time_value", values[i])
			} else if value.Valid {
				_if.TimeValue = value.Time
			}
		case itemfield.ForeignKeys[0]:
			if value, ok := values[i].(*sql.NullScanner); !ok {
				return fmt.Errorf("unexpected type %T for field item_fields", values[i])
			} else if value.Valid {
				// Nullable FK: only materialize the pointer when the column is non-NULL.
				_if.item_fields = new(uuid.UUID)
				*_if.item_fields = *value.S.(*uuid.UUID)
			}
		}
	}
	return nil
}

// QueryItem queries the "item" edge of the ItemField entity.
func (_if *ItemField) QueryItem() *ItemQuery {
	return (&ItemFieldClient{config: _if.config}).QueryItem(_if)
}

// Update returns a builder for updating this ItemField.
// Note that you need to call ItemField.Unwrap() before calling this method if this ItemField
// was returned from a transaction, and the transaction was committed or rolled back.
func (_if *ItemField) Update() *ItemFieldUpdateOne {
	return (&ItemFieldClient{config: _if.config}).UpdateOne(_if)
}

// Unwrap unwraps the ItemField entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (_if *ItemField) Unwrap() *ItemField {
	_tx, ok := _if.config.driver.(*txDriver)
	if !ok {
		panic("ent: ItemField is not a transactional entity")
	}
	_if.config.driver = _tx.drv
	return _if
}

// String implements the fmt.Stringer.
func (_if *ItemField) String() string {
	var builder strings.Builder
	builder.WriteString("ItemField(")
	builder.WriteString(fmt.Sprintf("id=%v, ", _if.ID))
	builder.WriteString("created_at=")
	builder.WriteString(_if.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("updated_at=")
	builder.WriteString(_if.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("name=")
	builder.WriteString(_if.Name)
	builder.WriteString(", ")
	builder.WriteString("description=")
	builder.WriteString(_if.Description)
	builder.WriteString(", ")
	builder.WriteString("type=")
	builder.WriteString(fmt.Sprintf("%v", _if.Type))
	builder.WriteString(", ")
	builder.WriteString("text_value=")
	builder.WriteString(_if.TextValue)
	builder.WriteString(", ")
	builder.WriteString("number_value=")
	builder.WriteString(fmt.Sprintf("%v", _if.NumberValue))
	builder.WriteString(", ")
	builder.WriteString("boolean_value=")
	builder.WriteString(fmt.Sprintf("%v", _if.BooleanValue))
	builder.WriteString(", ")
	builder.WriteString("time_value=")
	builder.WriteString(_if.TimeValue.Format(time.ANSIC))
	builder.WriteByte(')')
	return builder.String()
}

// ItemFields is a parsable slice of ItemField.
type ItemFields []*ItemField

// config propagates the shared client configuration to every element of the
// slice so each entity can run follow-up queries/updates.
func (_if ItemFields) config(cfg config) {
	for _i := range _if {
		_if[_i].config = cfg
	}
}
diff --git a/backend/ent/itemfield/itemfield.go b/backend/ent/itemfield/itemfield.go
new file mode 100644
index 0000000..ccad0fe
--- /dev/null
+++ b/backend/ent/itemfield/itemfield.go
@@ -0,0 +1,127 @@
// Code generated by ent, DO NOT EDIT.

package itemfield

import (
	"fmt"
	"time"

	"github.com/google/uuid"
)

const (
	// Label holds the string label denoting the itemfield type in the database.
	Label = "item_field"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
	FieldUpdatedAt = "updated_at"
	// FieldName holds the string denoting the name field in the database.
	FieldName = "name"
	// FieldDescription holds the string denoting the description field in the database.
	FieldDescription = "description"
	// FieldType holds the string denoting the type field in the database.
	FieldType = "type"
	// FieldTextValue holds the string denoting the text_value field in the database.
	FieldTextValue = "text_value"
	// FieldNumberValue holds the string denoting the number_value field in the database.
	FieldNumberValue = "number_value"
	// FieldBooleanValue holds the string denoting the boolean_value field in the database.
	FieldBooleanValue = "boolean_value"
	// FieldTimeValue holds the string denoting the time_value field in the database.
	FieldTimeValue = "time_value"
	// EdgeItem holds the string denoting the item edge name in mutations.
	EdgeItem = "item"
	// Table holds the table name of the itemfield in the database.
	Table = "item_fields"
	// ItemTable is the table that holds the item relation/edge.
	// NOTE(review): same table as Table — the M2O edge is stored as an FK
	// column on "item_fields" itself, not in a join table.
	ItemTable = "item_fields"
	// ItemInverseTable is the table name for the Item entity.
	// It exists in this package in order to avoid circular dependency with the "item" package.
	ItemInverseTable = "items"
	// ItemColumn is the table column denoting the item relation/edge.
	ItemColumn = "item_fields"
)

// Columns holds all SQL columns for itemfield fields.
var Columns = []string{
	FieldID,
	FieldCreatedAt,
	FieldUpdatedAt,
	FieldName,
	FieldDescription,
	FieldType,
	FieldTextValue,
	FieldNumberValue,
	FieldBooleanValue,
	FieldTimeValue,
}

// ForeignKeys holds the SQL foreign-keys that are owned by the "item_fields"
// table and are not defined as standalone fields in the schema.
var ForeignKeys = []string{
	"item_fields",
}

// ValidColumn reports if the column name is valid (part of the table columns).
// Both schema-defined Columns and generator-owned ForeignKeys are accepted.
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	for i := range ForeignKeys {
		if column == ForeignKeys[i] {
			return true
		}
	}
	return false
}

// The hooks below are nil here and wired up at runtime by ent/runtime.go
// from the schema definitions.
var (
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
	DefaultUpdatedAt func() time.Time
	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
	UpdateDefaultUpdatedAt func() time.Time
	// NameValidator is a validator for the "name" field. It is called by the builders before save.
	NameValidator func(string) error
	// DescriptionValidator is a validator for the "description" field. It is called by the builders before save.
	DescriptionValidator func(string) error
	// TextValueValidator is a validator for the "text_value" field. It is called by the builders before save.
	TextValueValidator func(string) error
	// DefaultBooleanValue holds the default value on creation for the "boolean_value" field.
	DefaultBooleanValue bool
	// DefaultTimeValue holds the default value on creation for the "time_value" field.
	DefaultTimeValue func() time.Time
	// DefaultID holds the default value on creation for the "id" field.
	DefaultID func() uuid.UUID
)

// Type defines the type for the "type" enum field.
type Type string

// Type values.
const (
	TypeText    Type = "text"
	TypeNumber  Type = "number"
	TypeBoolean Type = "boolean"
	TypeTime    Type = "time"
)

// String implements fmt.Stringer for Type.
func (_type Type) String() string {
	return string(_type)
}

// TypeValidator is a validator for the "type" field enum values. It is called by the builders before save.
func TypeValidator(_type Type) error {
	switch _type {
	case TypeText, TypeNumber, TypeBoolean, TypeTime:
		return nil
	default:
		return fmt.Errorf("itemfield: invalid enum value for type field: %q", _type)
	}
}
diff --git a/backend/ent/itemfield/where.go b/backend/ent/itemfield/where.go
new file mode 100644
index 0000000..e319aa6
--- /dev/null
+++ b/backend/ent/itemfield/where.go
@@ -0,0 +1,844 @@
// Code generated by ent, DO NOT EDIT.

package itemfield

import (
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// ID filters vertices based on their ID field.
func ID(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldID), id))
	})
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.In(s.C(FieldID), v...))
	})
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.NotIn(s.C(FieldID), v...))
	})
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldID), id))
	})
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldID), id))
	})
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldID), id))
	})
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id uuid.UUID) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldID), id))
	})
}

// --- generated shorthand equality predicates, one per field ---

// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
func Name(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldName), v))
	})
}

// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ.
func Description(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldDescription), v))
	})
}

// TextValue applies equality check predicate on the "text_value" field. It's identical to TextValueEQ.
func TextValue(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTextValue), v))
	})
}

// NumberValue applies equality check predicate on the "number_value" field. It's identical to NumberValueEQ.
func NumberValue(v int) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldNumberValue), v))
	})
}

// BooleanValue applies equality check predicate on the "boolean_value" field. It's identical to BooleanValueEQ.
func BooleanValue(v bool) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldBooleanValue), v))
	})
}

// TimeValue applies equality check predicate on the "time_value" field. It's identical to TimeValueEQ.
func TimeValue(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTimeValue), v))
	})
}

// --- "created_at" predicates ---

// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldCreatedAt), v))
	})
}

// --- "updated_at" predicates ---

// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
	})
}

// --- "name" predicates ---

// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldName), v))
	})
}

// NameNEQ applies the NEQ predicate on the "name" field.
func NameNEQ(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldName), v))
	})
}

// NameIn applies the In predicate on the "name" field.
func NameIn(vs ...string) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldName), v...))
	})
}

// NameNotIn applies the NotIn predicate on the "name" field.
func NameNotIn(vs ...string) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldName), v...))
	})
}

// NameGT applies the GT predicate on the "name" field.
func NameGT(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldName), v))
	})
}

// NameGTE applies the GTE predicate on the "name" field.
func NameGTE(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldName), v))
	})
}

// NameLT applies the LT predicate on the "name" field.
func NameLT(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldName), v))
	})
}

// NameLTE applies the LTE predicate on the "name" field.
func NameLTE(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldName), v))
	})
}

// NameContains applies the Contains predicate on the "name" field.
func NameContains(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldName), v))
	})
}

// NameHasPrefix applies the HasPrefix predicate on the "name" field.
func NameHasPrefix(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldName), v))
	})
}

// NameHasSuffix applies the HasSuffix predicate on the "name" field.
func NameHasSuffix(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldName), v))
	})
}

// NameEqualFold applies the EqualFold predicate on the "name" field.
func NameEqualFold(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldName), v))
	})
}

// NameContainsFold applies the ContainsFold predicate on the "name" field.
func NameContainsFold(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.ContainsFold(s.C(FieldName), v))
	})
}

// --- "description" predicates (nullable column: includes IsNil/NotNil) ---

// DescriptionEQ applies the EQ predicate on the "description" field.
func DescriptionEQ(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldDescription), v))
	})
}

// DescriptionNEQ applies the NEQ predicate on the "description" field.
func DescriptionNEQ(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldDescription), v))
	})
}

// DescriptionIn applies the In predicate on the "description" field.
func DescriptionIn(vs ...string) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldDescription), v...))
	})
}

// DescriptionNotIn applies the NotIn predicate on the "description" field.
func DescriptionNotIn(vs ...string) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldDescription), v...))
	})
}

// DescriptionGT applies the GT predicate on the "description" field.
func DescriptionGT(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldDescription), v))
	})
}

// DescriptionGTE applies the GTE predicate on the "description" field.
func DescriptionGTE(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldDescription), v))
	})
}

// DescriptionLT applies the LT predicate on the "description" field.
func DescriptionLT(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldDescription), v))
	})
}

// DescriptionLTE applies the LTE predicate on the "description" field.
func DescriptionLTE(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldDescription), v))
	})
}

// DescriptionContains applies the Contains predicate on the "description" field.
func DescriptionContains(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldDescription), v))
	})
}

// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field.
func DescriptionHasPrefix(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldDescription), v))
	})
}

// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field.
func DescriptionHasSuffix(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldDescription), v))
	})
}

// DescriptionIsNil applies the IsNil predicate on the "description" field.
func DescriptionIsNil() predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.IsNull(s.C(FieldDescription)))
	})
}

// DescriptionNotNil applies the NotNil predicate on the "description" field.
func DescriptionNotNil() predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotNull(s.C(FieldDescription)))
	})
}

// DescriptionEqualFold applies the EqualFold predicate on the "description" field.
func DescriptionEqualFold(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldDescription), v))
	})
}

// DescriptionContainsFold applies the ContainsFold predicate on the "description" field.
func DescriptionContainsFold(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.ContainsFold(s.C(FieldDescription), v))
	})
}

// --- "type" enum predicates (equality/membership only) ---

// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v Type) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldType), v))
	})
}

// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v Type) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldType), v))
	})
}

// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...Type) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldType), v...))
	})
}

// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...Type) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldType), v...))
	})
}

// --- "text_value" predicates (nullable column: includes IsNil/NotNil) ---

// TextValueEQ applies the EQ predicate on the "text_value" field.
func TextValueEQ(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTextValue), v))
	})
}

// TextValueNEQ applies the NEQ predicate on the "text_value" field.
func TextValueNEQ(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldTextValue), v))
	})
}

// TextValueIn applies the In predicate on the "text_value" field.
func TextValueIn(vs ...string) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldTextValue), v...))
	})
}

// TextValueNotIn applies the NotIn predicate on the "text_value" field.
func TextValueNotIn(vs ...string) predicate.ItemField {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldTextValue), v...))
	})
}

// TextValueGT applies the GT predicate on the "text_value" field.
func TextValueGT(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldTextValue), v))
	})
}

// TextValueGTE applies the GTE predicate on the "text_value" field.
func TextValueGTE(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldTextValue), v))
	})
}

// TextValueLT applies the LT predicate on the "text_value" field.
func TextValueLT(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldTextValue), v))
	})
}

// TextValueLTE applies the LTE predicate on the "text_value" field.
func TextValueLTE(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldTextValue), v))
	})
}

// TextValueContains applies the Contains predicate on the "text_value" field.
func TextValueContains(v string) predicate.ItemField {
	return predicate.ItemField(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldTextValue), v))
	})
}

// TextValueHasPrefix applies the HasPrefix predicate on the "text_value" field.
+func TextValueHasPrefix(v string) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldTextValue), v)) + }) +} + +// TextValueHasSuffix applies the HasSuffix predicate on the "text_value" field. +func TextValueHasSuffix(v string) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldTextValue), v)) + }) +} + +// TextValueIsNil applies the IsNil predicate on the "text_value" field. +func TextValueIsNil() predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldTextValue))) + }) +} + +// TextValueNotNil applies the NotNil predicate on the "text_value" field. +func TextValueNotNil() predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldTextValue))) + }) +} + +// TextValueEqualFold applies the EqualFold predicate on the "text_value" field. +func TextValueEqualFold(v string) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldTextValue), v)) + }) +} + +// TextValueContainsFold applies the ContainsFold predicate on the "text_value" field. +func TextValueContainsFold(v string) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldTextValue), v)) + }) +} + +// NumberValueEQ applies the EQ predicate on the "number_value" field. +func NumberValueEQ(v int) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldNumberValue), v)) + }) +} + +// NumberValueNEQ applies the NEQ predicate on the "number_value" field. +func NumberValueNEQ(v int) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldNumberValue), v)) + }) +} + +// NumberValueIn applies the In predicate on the "number_value" field. 
+func NumberValueIn(vs ...int) predicate.ItemField { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldNumberValue), v...)) + }) +} + +// NumberValueNotIn applies the NotIn predicate on the "number_value" field. +func NumberValueNotIn(vs ...int) predicate.ItemField { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldNumberValue), v...)) + }) +} + +// NumberValueGT applies the GT predicate on the "number_value" field. +func NumberValueGT(v int) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldNumberValue), v)) + }) +} + +// NumberValueGTE applies the GTE predicate on the "number_value" field. +func NumberValueGTE(v int) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldNumberValue), v)) + }) +} + +// NumberValueLT applies the LT predicate on the "number_value" field. +func NumberValueLT(v int) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldNumberValue), v)) + }) +} + +// NumberValueLTE applies the LTE predicate on the "number_value" field. +func NumberValueLTE(v int) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldNumberValue), v)) + }) +} + +// NumberValueIsNil applies the IsNil predicate on the "number_value" field. +func NumberValueIsNil() predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldNumberValue))) + }) +} + +// NumberValueNotNil applies the NotNil predicate on the "number_value" field. 
+func NumberValueNotNil() predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldNumberValue))) + }) +} + +// BooleanValueEQ applies the EQ predicate on the "boolean_value" field. +func BooleanValueEQ(v bool) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldBooleanValue), v)) + }) +} + +// BooleanValueNEQ applies the NEQ predicate on the "boolean_value" field. +func BooleanValueNEQ(v bool) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldBooleanValue), v)) + }) +} + +// TimeValueEQ applies the EQ predicate on the "time_value" field. +func TimeValueEQ(v time.Time) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldTimeValue), v)) + }) +} + +// TimeValueNEQ applies the NEQ predicate on the "time_value" field. +func TimeValueNEQ(v time.Time) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldTimeValue), v)) + }) +} + +// TimeValueIn applies the In predicate on the "time_value" field. +func TimeValueIn(vs ...time.Time) predicate.ItemField { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldTimeValue), v...)) + }) +} + +// TimeValueNotIn applies the NotIn predicate on the "time_value" field. +func TimeValueNotIn(vs ...time.Time) predicate.ItemField { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldTimeValue), v...)) + }) +} + +// TimeValueGT applies the GT predicate on the "time_value" field. 
+func TimeValueGT(v time.Time) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldTimeValue), v)) + }) +} + +// TimeValueGTE applies the GTE predicate on the "time_value" field. +func TimeValueGTE(v time.Time) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldTimeValue), v)) + }) +} + +// TimeValueLT applies the LT predicate on the "time_value" field. +func TimeValueLT(v time.Time) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldTimeValue), v)) + }) +} + +// TimeValueLTE applies the LTE predicate on the "time_value" field. +func TimeValueLTE(v time.Time) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldTimeValue), v)) + }) +} + +// HasItem applies the HasEdge predicate on the "item" edge. +func HasItem() predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasItemWith applies the HasEdge predicate on the "item" edge with a given conditions (other predicates). +func HasItemWith(preds ...predicate.Item) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. 
+func And(predicates ...predicate.ItemField) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.ItemField) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.ItemField) predicate.ItemField { + return predicate.ItemField(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/itemfield_create.go b/backend/ent/itemfield_create.go new file mode 100644 index 0000000..3e01a89 --- /dev/null +++ b/backend/ent/itemfield_create.go @@ -0,0 +1,516 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" +) + +// ItemFieldCreate is the builder for creating a ItemField entity. +type ItemFieldCreate struct { + config + mutation *ItemFieldMutation + hooks []Hook +} + +// SetCreatedAt sets the "created_at" field. +func (ifc *ItemFieldCreate) SetCreatedAt(t time.Time) *ItemFieldCreate { + ifc.mutation.SetCreatedAt(t) + return ifc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableCreatedAt(t *time.Time) *ItemFieldCreate { + if t != nil { + ifc.SetCreatedAt(*t) + } + return ifc +} + +// SetUpdatedAt sets the "updated_at" field. 
+func (ifc *ItemFieldCreate) SetUpdatedAt(t time.Time) *ItemFieldCreate { + ifc.mutation.SetUpdatedAt(t) + return ifc +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableUpdatedAt(t *time.Time) *ItemFieldCreate { + if t != nil { + ifc.SetUpdatedAt(*t) + } + return ifc +} + +// SetName sets the "name" field. +func (ifc *ItemFieldCreate) SetName(s string) *ItemFieldCreate { + ifc.mutation.SetName(s) + return ifc +} + +// SetDescription sets the "description" field. +func (ifc *ItemFieldCreate) SetDescription(s string) *ItemFieldCreate { + ifc.mutation.SetDescription(s) + return ifc +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableDescription(s *string) *ItemFieldCreate { + if s != nil { + ifc.SetDescription(*s) + } + return ifc +} + +// SetType sets the "type" field. +func (ifc *ItemFieldCreate) SetType(i itemfield.Type) *ItemFieldCreate { + ifc.mutation.SetType(i) + return ifc +} + +// SetTextValue sets the "text_value" field. +func (ifc *ItemFieldCreate) SetTextValue(s string) *ItemFieldCreate { + ifc.mutation.SetTextValue(s) + return ifc +} + +// SetNillableTextValue sets the "text_value" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableTextValue(s *string) *ItemFieldCreate { + if s != nil { + ifc.SetTextValue(*s) + } + return ifc +} + +// SetNumberValue sets the "number_value" field. +func (ifc *ItemFieldCreate) SetNumberValue(i int) *ItemFieldCreate { + ifc.mutation.SetNumberValue(i) + return ifc +} + +// SetNillableNumberValue sets the "number_value" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableNumberValue(i *int) *ItemFieldCreate { + if i != nil { + ifc.SetNumberValue(*i) + } + return ifc +} + +// SetBooleanValue sets the "boolean_value" field. 
+func (ifc *ItemFieldCreate) SetBooleanValue(b bool) *ItemFieldCreate { + ifc.mutation.SetBooleanValue(b) + return ifc +} + +// SetNillableBooleanValue sets the "boolean_value" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableBooleanValue(b *bool) *ItemFieldCreate { + if b != nil { + ifc.SetBooleanValue(*b) + } + return ifc +} + +// SetTimeValue sets the "time_value" field. +func (ifc *ItemFieldCreate) SetTimeValue(t time.Time) *ItemFieldCreate { + ifc.mutation.SetTimeValue(t) + return ifc +} + +// SetNillableTimeValue sets the "time_value" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableTimeValue(t *time.Time) *ItemFieldCreate { + if t != nil { + ifc.SetTimeValue(*t) + } + return ifc +} + +// SetID sets the "id" field. +func (ifc *ItemFieldCreate) SetID(u uuid.UUID) *ItemFieldCreate { + ifc.mutation.SetID(u) + return ifc +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableID(u *uuid.UUID) *ItemFieldCreate { + if u != nil { + ifc.SetID(*u) + } + return ifc +} + +// SetItemID sets the "item" edge to the Item entity by ID. +func (ifc *ItemFieldCreate) SetItemID(id uuid.UUID) *ItemFieldCreate { + ifc.mutation.SetItemID(id) + return ifc +} + +// SetNillableItemID sets the "item" edge to the Item entity by ID if the given value is not nil. +func (ifc *ItemFieldCreate) SetNillableItemID(id *uuid.UUID) *ItemFieldCreate { + if id != nil { + ifc = ifc.SetItemID(*id) + } + return ifc +} + +// SetItem sets the "item" edge to the Item entity. +func (ifc *ItemFieldCreate) SetItem(i *Item) *ItemFieldCreate { + return ifc.SetItemID(i.ID) +} + +// Mutation returns the ItemFieldMutation object of the builder. +func (ifc *ItemFieldCreate) Mutation() *ItemFieldMutation { + return ifc.mutation +} + +// Save creates the ItemField in the database. 
+func (ifc *ItemFieldCreate) Save(ctx context.Context) (*ItemField, error) { + var ( + err error + node *ItemField + ) + ifc.defaults() + if len(ifc.hooks) == 0 { + if err = ifc.check(); err != nil { + return nil, err + } + node, err = ifc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemFieldMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = ifc.check(); err != nil { + return nil, err + } + ifc.mutation = mutation + if node, err = ifc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(ifc.hooks) - 1; i >= 0; i-- { + if ifc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ifc.hooks[i](mut) + } + v, err := mut.Mutate(ctx, ifc.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*ItemField) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from ItemFieldMutation", v) + } + node = nv + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. +func (ifc *ItemFieldCreate) SaveX(ctx context.Context) *ItemField { + v, err := ifc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (ifc *ItemFieldCreate) Exec(ctx context.Context) error { + _, err := ifc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ifc *ItemFieldCreate) ExecX(ctx context.Context) { + if err := ifc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. 
+func (ifc *ItemFieldCreate) defaults() { + if _, ok := ifc.mutation.CreatedAt(); !ok { + v := itemfield.DefaultCreatedAt() + ifc.mutation.SetCreatedAt(v) + } + if _, ok := ifc.mutation.UpdatedAt(); !ok { + v := itemfield.DefaultUpdatedAt() + ifc.mutation.SetUpdatedAt(v) + } + if _, ok := ifc.mutation.BooleanValue(); !ok { + v := itemfield.DefaultBooleanValue + ifc.mutation.SetBooleanValue(v) + } + if _, ok := ifc.mutation.TimeValue(); !ok { + v := itemfield.DefaultTimeValue() + ifc.mutation.SetTimeValue(v) + } + if _, ok := ifc.mutation.ID(); !ok { + v := itemfield.DefaultID() + ifc.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (ifc *ItemFieldCreate) check() error { + if _, ok := ifc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "ItemField.created_at"`)} + } + if _, ok := ifc.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "ItemField.updated_at"`)} + } + if _, ok := ifc.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "ItemField.name"`)} + } + if v, ok := ifc.mutation.Name(); ok { + if err := itemfield.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "ItemField.name": %w`, err)} + } + } + if v, ok := ifc.mutation.Description(); ok { + if err := itemfield.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "ItemField.description": %w`, err)} + } + } + if _, ok := ifc.mutation.GetType(); !ok { + return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "ItemField.type"`)} + } + if v, ok := ifc.mutation.GetType(); ok { + if err := itemfield.TypeValidator(v); err != nil { + return &ValidationError{Name: "type", err: fmt.Errorf(`ent: 
validator failed for field "ItemField.type": %w`, err)} + } + } + if v, ok := ifc.mutation.TextValue(); ok { + if err := itemfield.TextValueValidator(v); err != nil { + return &ValidationError{Name: "text_value", err: fmt.Errorf(`ent: validator failed for field "ItemField.text_value": %w`, err)} + } + } + if _, ok := ifc.mutation.BooleanValue(); !ok { + return &ValidationError{Name: "boolean_value", err: errors.New(`ent: missing required field "ItemField.boolean_value"`)} + } + if _, ok := ifc.mutation.TimeValue(); !ok { + return &ValidationError{Name: "time_value", err: errors.New(`ent: missing required field "ItemField.time_value"`)} + } + return nil +} + +func (ifc *ItemFieldCreate) sqlSave(ctx context.Context) (*ItemField, error) { + _node, _spec := ifc.createSpec() + if err := sqlgraph.CreateNode(ctx, ifc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (ifc *ItemFieldCreate) createSpec() (*ItemField, *sqlgraph.CreateSpec) { + var ( + _node = &ItemField{config: ifc.config} + _spec = &sqlgraph.CreateSpec{ + Table: itemfield.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + } + ) + if id, ok := ifc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := ifc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := ifc.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } + if value, ok := 
ifc.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldName, + }) + _node.Name = value + } + if value, ok := ifc.mutation.Description(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldDescription, + }) + _node.Description = value + } + if value, ok := ifc.mutation.GetType(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeEnum, + Value: value, + Column: itemfield.FieldType, + }) + _node.Type = value + } + if value, ok := ifc.mutation.TextValue(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldTextValue, + }) + _node.TextValue = value + } + if value, ok := ifc.mutation.NumberValue(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Value: value, + Column: itemfield.FieldNumberValue, + }) + _node.NumberValue = value + } + if value, ok := ifc.mutation.BooleanValue(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: itemfield.FieldBooleanValue, + }) + _node.BooleanValue = value + } + if value, ok := ifc.mutation.TimeValue(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldTimeValue, + }) + _node.TimeValue = value + } + if nodes := ifc.mutation.ItemIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: itemfield.ItemTable, + Columns: []string{itemfield.ItemColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.item_fields = &nodes[0] + _spec.Edges = 
append(_spec.Edges, edge) + } + return _node, _spec +} + +// ItemFieldCreateBulk is the builder for creating many ItemField entities in bulk. +type ItemFieldCreateBulk struct { + config + builders []*ItemFieldCreate +} + +// Save creates the ItemField entities in the database. +func (ifcb *ItemFieldCreateBulk) Save(ctx context.Context) ([]*ItemField, error) { + specs := make([]*sqlgraph.CreateSpec, len(ifcb.builders)) + nodes := make([]*ItemField, len(ifcb.builders)) + mutators := make([]Mutator, len(ifcb.builders)) + for i := range ifcb.builders { + func(i int, root context.Context) { + builder := ifcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemFieldMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, ifcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. + if err = sqlgraph.BatchCreate(ctx, ifcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, ifcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. 
+func (ifcb *ItemFieldCreateBulk) SaveX(ctx context.Context) []*ItemField { + v, err := ifcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (ifcb *ItemFieldCreateBulk) Exec(ctx context.Context) error { + _, err := ifcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ifcb *ItemFieldCreateBulk) ExecX(ctx context.Context) { + if err := ifcb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/itemfield_delete.go b/backend/ent/itemfield_delete.go new file mode 100644 index 0000000..2db4de3 --- /dev/null +++ b/backend/ent/itemfield_delete.go @@ -0,0 +1,115 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ItemFieldDelete is the builder for deleting a ItemField entity. +type ItemFieldDelete struct { + config + hooks []Hook + mutation *ItemFieldMutation +} + +// Where appends a list predicates to the ItemFieldDelete builder. +func (ifd *ItemFieldDelete) Where(ps ...predicate.ItemField) *ItemFieldDelete { + ifd.mutation.Where(ps...) + return ifd +} + +// Exec executes the deletion query and returns how many vertices were deleted. 
+func (ifd *ItemFieldDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(ifd.hooks) == 0 { + affected, err = ifd.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemFieldMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + ifd.mutation = mutation + affected, err = ifd.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(ifd.hooks) - 1; i >= 0; i-- { + if ifd.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ifd.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, ifd.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ifd *ItemFieldDelete) ExecX(ctx context.Context) int { + n, err := ifd.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (ifd *ItemFieldDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: itemfield.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + if ps := ifd.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, ifd.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return affected, err +} + +// ItemFieldDeleteOne is the builder for deleting a single ItemField entity. +type ItemFieldDeleteOne struct { + ifd *ItemFieldDelete +} + +// Exec executes the deletion query. 
+func (ifdo *ItemFieldDeleteOne) Exec(ctx context.Context) error { + n, err := ifdo.ifd.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{itemfield.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (ifdo *ItemFieldDeleteOne) ExecX(ctx context.Context) { + ifdo.ifd.ExecX(ctx) +} diff --git a/backend/ent/itemfield_query.go b/backend/ent/itemfield_query.go new file mode 100644 index 0000000..d26cfcf --- /dev/null +++ b/backend/ent/itemfield_query.go @@ -0,0 +1,611 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ItemFieldQuery is the builder for querying ItemField entities. +type ItemFieldQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.ItemField + withItem *ItemQuery + withFKs bool + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the ItemFieldQuery builder. +func (ifq *ItemFieldQuery) Where(ps ...predicate.ItemField) *ItemFieldQuery { + ifq.predicates = append(ifq.predicates, ps...) + return ifq +} + +// Limit adds a limit step to the query. +func (ifq *ItemFieldQuery) Limit(limit int) *ItemFieldQuery { + ifq.limit = &limit + return ifq +} + +// Offset adds an offset step to the query. +func (ifq *ItemFieldQuery) Offset(offset int) *ItemFieldQuery { + ifq.offset = &offset + return ifq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. 
+func (ifq *ItemFieldQuery) Unique(unique bool) *ItemFieldQuery { + ifq.unique = &unique + return ifq +} + +// Order adds an order step to the query. +func (ifq *ItemFieldQuery) Order(o ...OrderFunc) *ItemFieldQuery { + ifq.order = append(ifq.order, o...) + return ifq +} + +// QueryItem chains the current query on the "item" edge. +func (ifq *ItemFieldQuery) QueryItem() *ItemQuery { + query := &ItemQuery{config: ifq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := ifq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := ifq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(itemfield.Table, itemfield.FieldID, selector), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, itemfield.ItemTable, itemfield.ItemColumn), + ) + fromU = sqlgraph.SetNeighbors(ifq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first ItemField entity from the query. +// Returns a *NotFoundError when no ItemField was found. +func (ifq *ItemFieldQuery) First(ctx context.Context) (*ItemField, error) { + nodes, err := ifq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{itemfield.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (ifq *ItemFieldQuery) FirstX(ctx context.Context) *ItemField { + node, err := ifq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first ItemField ID from the query. +// Returns a *NotFoundError when no ItemField ID was found. 
+func (ifq *ItemFieldQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = ifq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{itemfield.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (ifq *ItemFieldQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := ifq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single ItemField entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one ItemField entity is found. +// Returns a *NotFoundError when no ItemField entities are found. +func (ifq *ItemFieldQuery) Only(ctx context.Context) (*ItemField, error) { + nodes, err := ifq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{itemfield.Label} + default: + return nil, &NotSingularError{itemfield.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (ifq *ItemFieldQuery) OnlyX(ctx context.Context) *ItemField { + node, err := ifq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only ItemField ID in the query. +// Returns a *NotSingularError when more than one ItemField ID is found. +// Returns a *NotFoundError when no entities are found. +func (ifq *ItemFieldQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = ifq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{itemfield.Label} + default: + err = &NotSingularError{itemfield.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. 
+func (ifq *ItemFieldQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := ifq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of ItemFields. +func (ifq *ItemFieldQuery) All(ctx context.Context) ([]*ItemField, error) { + if err := ifq.prepareQuery(ctx); err != nil { + return nil, err + } + return ifq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (ifq *ItemFieldQuery) AllX(ctx context.Context) []*ItemField { + nodes, err := ifq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of ItemField IDs. +func (ifq *ItemFieldQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := ifq.Select(itemfield.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (ifq *ItemFieldQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := ifq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (ifq *ItemFieldQuery) Count(ctx context.Context) (int, error) { + if err := ifq.prepareQuery(ctx); err != nil { + return 0, err + } + return ifq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (ifq *ItemFieldQuery) CountX(ctx context.Context) int { + count, err := ifq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (ifq *ItemFieldQuery) Exist(ctx context.Context) (bool, error) { + if err := ifq.prepareQuery(ctx); err != nil { + return false, err + } + return ifq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. 
+func (ifq *ItemFieldQuery) ExistX(ctx context.Context) bool { + exist, err := ifq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the ItemFieldQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (ifq *ItemFieldQuery) Clone() *ItemFieldQuery { + if ifq == nil { + return nil + } + return &ItemFieldQuery{ + config: ifq.config, + limit: ifq.limit, + offset: ifq.offset, + order: append([]OrderFunc{}, ifq.order...), + predicates: append([]predicate.ItemField{}, ifq.predicates...), + withItem: ifq.withItem.Clone(), + // clone intermediate query. + sql: ifq.sql.Clone(), + path: ifq.path, + unique: ifq.unique, + } +} + +// WithItem tells the query-builder to eager-load the nodes that are connected to +// the "item" edge. The optional arguments are used to configure the query builder of the edge. +func (ifq *ItemFieldQuery) WithItem(opts ...func(*ItemQuery)) *ItemFieldQuery { + query := &ItemQuery{config: ifq.config} + for _, opt := range opts { + opt(query) + } + ifq.withItem = query + return ifq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.ItemField.Query(). +// GroupBy(itemfield.FieldCreatedAt). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (ifq *ItemFieldQuery) GroupBy(field string, fields ...string) *ItemFieldGroupBy { + grbuild := &ItemFieldGroupBy{config: ifq.config} + grbuild.fields = append([]string{field}, fields...) 
+ grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := ifq.prepareQuery(ctx); err != nil { + return nil, err + } + return ifq.sqlQuery(ctx), nil + } + grbuild.label = itemfield.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.ItemField.Query(). +// Select(itemfield.FieldCreatedAt). +// Scan(ctx, &v) +func (ifq *ItemFieldQuery) Select(fields ...string) *ItemFieldSelect { + ifq.fields = append(ifq.fields, fields...) + selbuild := &ItemFieldSelect{ItemFieldQuery: ifq} + selbuild.label = itemfield.Label + selbuild.flds, selbuild.scan = &ifq.fields, selbuild.Scan + return selbuild +} + +func (ifq *ItemFieldQuery) prepareQuery(ctx context.Context) error { + for _, f := range ifq.fields { + if !itemfield.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if ifq.path != nil { + prev, err := ifq.path(ctx) + if err != nil { + return err + } + ifq.sql = prev + } + return nil +} + +func (ifq *ItemFieldQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ItemField, error) { + var ( + nodes = []*ItemField{} + withFKs = ifq.withFKs + _spec = ifq.querySpec() + loadedTypes = [1]bool{ + ifq.withItem != nil, + } + ) + if ifq.withItem != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, itemfield.ForeignKeys...) 
+ } + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + return (*ItemField).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + node := &ItemField{config: ifq.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, ifq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := ifq.withItem; query != nil { + if err := ifq.loadItem(ctx, query, nodes, nil, + func(n *ItemField, e *Item) { n.Edges.Item = e }); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (ifq *ItemFieldQuery) loadItem(ctx context.Context, query *ItemQuery, nodes []*ItemField, init func(*ItemField), assign func(*ItemField, *Item)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*ItemField) + for i := range nodes { + if nodes[i].item_fields == nil { + continue + } + fk := *nodes[i].item_fields + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(item.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "item_fields" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} + +func (ifq *ItemFieldQuery) sqlCount(ctx context.Context) (int, error) { + _spec := ifq.querySpec() + _spec.Node.Columns = ifq.fields + if len(ifq.fields) > 0 { + _spec.Unique = ifq.unique != nil && *ifq.unique + } + return sqlgraph.CountNodes(ctx, ifq.driver, _spec) +} + +func (ifq *ItemFieldQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := ifq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) 
+ } + return n > 0, nil +} + +func (ifq *ItemFieldQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: itemfield.Table, + Columns: itemfield.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + From: ifq.sql, + Unique: true, + } + if unique := ifq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := ifq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, itemfield.FieldID) + for i := range fields { + if fields[i] != itemfield.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := ifq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := ifq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := ifq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := ifq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (ifq *ItemFieldQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(ifq.driver.Dialect()) + t1 := builder.Table(itemfield.Table) + columns := ifq.fields + if len(columns) == 0 { + columns = itemfield.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if ifq.sql != nil { + selector = ifq.sql + selector.Select(selector.Columns(columns...)...) + } + if ifq.unique != nil && *ifq.unique { + selector.Distinct() + } + for _, p := range ifq.predicates { + p(selector) + } + for _, p := range ifq.order { + p(selector) + } + if offset := ifq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. 
+ selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := ifq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// ItemFieldGroupBy is the group-by builder for ItemField entities. +type ItemFieldGroupBy struct { + config + selector + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (ifgb *ItemFieldGroupBy) Aggregate(fns ...AggregateFunc) *ItemFieldGroupBy { + ifgb.fns = append(ifgb.fns, fns...) + return ifgb +} + +// Scan applies the group-by query and scans the result into the given value. +func (ifgb *ItemFieldGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := ifgb.path(ctx) + if err != nil { + return err + } + ifgb.sql = query + return ifgb.sqlScan(ctx, v) +} + +func (ifgb *ItemFieldGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range ifgb.fields { + if !itemfield.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := ifgb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := ifgb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (ifgb *ItemFieldGroupBy) sqlQuery() *sql.Selector { + selector := ifgb.sql.Select() + aggregation := make([]string, 0, len(ifgb.fns)) + for _, fn := range ifgb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. 
+ if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(ifgb.fields)+len(ifgb.fns)) + for _, f := range ifgb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(ifgb.fields...)...) +} + +// ItemFieldSelect is the builder for selecting fields of ItemField entities. +type ItemFieldSelect struct { + *ItemFieldQuery + selector + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. +func (ifs *ItemFieldSelect) Scan(ctx context.Context, v interface{}) error { + if err := ifs.prepareQuery(ctx); err != nil { + return err + } + ifs.sql = ifs.ItemFieldQuery.sqlQuery(ctx) + return ifs.sqlScan(ctx, v) +} + +func (ifs *ItemFieldSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := ifs.sql.Query() + if err := ifs.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/itemfield_update.go b/backend/ent/itemfield_update.go new file mode 100644 index 0000000..1ba66df --- /dev/null +++ b/backend/ent/itemfield_update.go @@ -0,0 +1,836 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ItemFieldUpdate is the builder for updating ItemField entities. +type ItemFieldUpdate struct { + config + hooks []Hook + mutation *ItemFieldMutation +} + +// Where appends a list predicates to the ItemFieldUpdate builder. 
+func (ifu *ItemFieldUpdate) Where(ps ...predicate.ItemField) *ItemFieldUpdate { + ifu.mutation.Where(ps...) + return ifu +} + +// SetUpdatedAt sets the "updated_at" field. +func (ifu *ItemFieldUpdate) SetUpdatedAt(t time.Time) *ItemFieldUpdate { + ifu.mutation.SetUpdatedAt(t) + return ifu +} + +// SetName sets the "name" field. +func (ifu *ItemFieldUpdate) SetName(s string) *ItemFieldUpdate { + ifu.mutation.SetName(s) + return ifu +} + +// SetDescription sets the "description" field. +func (ifu *ItemFieldUpdate) SetDescription(s string) *ItemFieldUpdate { + ifu.mutation.SetDescription(s) + return ifu +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (ifu *ItemFieldUpdate) SetNillableDescription(s *string) *ItemFieldUpdate { + if s != nil { + ifu.SetDescription(*s) + } + return ifu +} + +// ClearDescription clears the value of the "description" field. +func (ifu *ItemFieldUpdate) ClearDescription() *ItemFieldUpdate { + ifu.mutation.ClearDescription() + return ifu +} + +// SetType sets the "type" field. +func (ifu *ItemFieldUpdate) SetType(i itemfield.Type) *ItemFieldUpdate { + ifu.mutation.SetType(i) + return ifu +} + +// SetTextValue sets the "text_value" field. +func (ifu *ItemFieldUpdate) SetTextValue(s string) *ItemFieldUpdate { + ifu.mutation.SetTextValue(s) + return ifu +} + +// SetNillableTextValue sets the "text_value" field if the given value is not nil. +func (ifu *ItemFieldUpdate) SetNillableTextValue(s *string) *ItemFieldUpdate { + if s != nil { + ifu.SetTextValue(*s) + } + return ifu +} + +// ClearTextValue clears the value of the "text_value" field. +func (ifu *ItemFieldUpdate) ClearTextValue() *ItemFieldUpdate { + ifu.mutation.ClearTextValue() + return ifu +} + +// SetNumberValue sets the "number_value" field. 
+func (ifu *ItemFieldUpdate) SetNumberValue(i int) *ItemFieldUpdate { + ifu.mutation.ResetNumberValue() + ifu.mutation.SetNumberValue(i) + return ifu +} + +// SetNillableNumberValue sets the "number_value" field if the given value is not nil. +func (ifu *ItemFieldUpdate) SetNillableNumberValue(i *int) *ItemFieldUpdate { + if i != nil { + ifu.SetNumberValue(*i) + } + return ifu +} + +// AddNumberValue adds i to the "number_value" field. +func (ifu *ItemFieldUpdate) AddNumberValue(i int) *ItemFieldUpdate { + ifu.mutation.AddNumberValue(i) + return ifu +} + +// ClearNumberValue clears the value of the "number_value" field. +func (ifu *ItemFieldUpdate) ClearNumberValue() *ItemFieldUpdate { + ifu.mutation.ClearNumberValue() + return ifu +} + +// SetBooleanValue sets the "boolean_value" field. +func (ifu *ItemFieldUpdate) SetBooleanValue(b bool) *ItemFieldUpdate { + ifu.mutation.SetBooleanValue(b) + return ifu +} + +// SetNillableBooleanValue sets the "boolean_value" field if the given value is not nil. +func (ifu *ItemFieldUpdate) SetNillableBooleanValue(b *bool) *ItemFieldUpdate { + if b != nil { + ifu.SetBooleanValue(*b) + } + return ifu +} + +// SetTimeValue sets the "time_value" field. +func (ifu *ItemFieldUpdate) SetTimeValue(t time.Time) *ItemFieldUpdate { + ifu.mutation.SetTimeValue(t) + return ifu +} + +// SetNillableTimeValue sets the "time_value" field if the given value is not nil. +func (ifu *ItemFieldUpdate) SetNillableTimeValue(t *time.Time) *ItemFieldUpdate { + if t != nil { + ifu.SetTimeValue(*t) + } + return ifu +} + +// SetItemID sets the "item" edge to the Item entity by ID. +func (ifu *ItemFieldUpdate) SetItemID(id uuid.UUID) *ItemFieldUpdate { + ifu.mutation.SetItemID(id) + return ifu +} + +// SetNillableItemID sets the "item" edge to the Item entity by ID if the given value is not nil. 
+func (ifu *ItemFieldUpdate) SetNillableItemID(id *uuid.UUID) *ItemFieldUpdate { + if id != nil { + ifu = ifu.SetItemID(*id) + } + return ifu +} + +// SetItem sets the "item" edge to the Item entity. +func (ifu *ItemFieldUpdate) SetItem(i *Item) *ItemFieldUpdate { + return ifu.SetItemID(i.ID) +} + +// Mutation returns the ItemFieldMutation object of the builder. +func (ifu *ItemFieldUpdate) Mutation() *ItemFieldMutation { + return ifu.mutation +} + +// ClearItem clears the "item" edge to the Item entity. +func (ifu *ItemFieldUpdate) ClearItem() *ItemFieldUpdate { + ifu.mutation.ClearItem() + return ifu +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (ifu *ItemFieldUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + ifu.defaults() + if len(ifu.hooks) == 0 { + if err = ifu.check(); err != nil { + return 0, err + } + affected, err = ifu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemFieldMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = ifu.check(); err != nil { + return 0, err + } + ifu.mutation = mutation + affected, err = ifu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(ifu.hooks) - 1; i >= 0; i-- { + if ifu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ifu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, ifu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (ifu *ItemFieldUpdate) SaveX(ctx context.Context) int { + affected, err := ifu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. 
+func (ifu *ItemFieldUpdate) Exec(ctx context.Context) error { + _, err := ifu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ifu *ItemFieldUpdate) ExecX(ctx context.Context) { + if err := ifu.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (ifu *ItemFieldUpdate) defaults() { + if _, ok := ifu.mutation.UpdatedAt(); !ok { + v := itemfield.UpdateDefaultUpdatedAt() + ifu.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (ifu *ItemFieldUpdate) check() error { + if v, ok := ifu.mutation.Name(); ok { + if err := itemfield.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "ItemField.name": %w`, err)} + } + } + if v, ok := ifu.mutation.Description(); ok { + if err := itemfield.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "ItemField.description": %w`, err)} + } + } + if v, ok := ifu.mutation.GetType(); ok { + if err := itemfield.TypeValidator(v); err != nil { + return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ItemField.type": %w`, err)} + } + } + if v, ok := ifu.mutation.TextValue(); ok { + if err := itemfield.TextValueValidator(v); err != nil { + return &ValidationError{Name: "text_value", err: fmt.Errorf(`ent: validator failed for field "ItemField.text_value": %w`, err)} + } + } + return nil +} + +func (ifu *ItemFieldUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: itemfield.Table, + Columns: itemfield.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + if ps := ifu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + 
ps[i](selector) + } + } + } + if value, ok := ifu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldUpdatedAt, + }) + } + if value, ok := ifu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldName, + }) + } + if value, ok := ifu.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldDescription, + }) + } + if ifu.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: itemfield.FieldDescription, + }) + } + if value, ok := ifu.mutation.GetType(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeEnum, + Value: value, + Column: itemfield.FieldType, + }) + } + if value, ok := ifu.mutation.TextValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldTextValue, + }) + } + if ifu.mutation.TextValueCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: itemfield.FieldTextValue, + }) + } + if value, ok := ifu.mutation.NumberValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Value: value, + Column: itemfield.FieldNumberValue, + }) + } + if value, ok := ifu.mutation.AddedNumberValue(); ok { + _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Value: value, + Column: itemfield.FieldNumberValue, + }) + } + if ifu.mutation.NumberValueCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: itemfield.FieldNumberValue, + }) + } + if value, 
ok := ifu.mutation.BooleanValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: itemfield.FieldBooleanValue, + }) + } + if value, ok := ifu.mutation.TimeValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldTimeValue, + }) + } + if ifu.mutation.ItemCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: itemfield.ItemTable, + Columns: []string{itemfield.ItemColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := ifu.mutation.ItemIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: itemfield.ItemTable, + Columns: []string{itemfield.ItemColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, ifu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{itemfield.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + return n, nil +} + +// ItemFieldUpdateOne is the builder for updating a single ItemField entity. +type ItemFieldUpdateOne struct { + config + fields []string + hooks []Hook + mutation *ItemFieldMutation +} + +// SetUpdatedAt sets the "updated_at" field. +func (ifuo *ItemFieldUpdateOne) SetUpdatedAt(t time.Time) *ItemFieldUpdateOne { + ifuo.mutation.SetUpdatedAt(t) + return ifuo +} + +// SetName sets the "name" field. 
+func (ifuo *ItemFieldUpdateOne) SetName(s string) *ItemFieldUpdateOne { + ifuo.mutation.SetName(s) + return ifuo +} + +// SetDescription sets the "description" field. +func (ifuo *ItemFieldUpdateOne) SetDescription(s string) *ItemFieldUpdateOne { + ifuo.mutation.SetDescription(s) + return ifuo +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (ifuo *ItemFieldUpdateOne) SetNillableDescription(s *string) *ItemFieldUpdateOne { + if s != nil { + ifuo.SetDescription(*s) + } + return ifuo +} + +// ClearDescription clears the value of the "description" field. +func (ifuo *ItemFieldUpdateOne) ClearDescription() *ItemFieldUpdateOne { + ifuo.mutation.ClearDescription() + return ifuo +} + +// SetType sets the "type" field. +func (ifuo *ItemFieldUpdateOne) SetType(i itemfield.Type) *ItemFieldUpdateOne { + ifuo.mutation.SetType(i) + return ifuo +} + +// SetTextValue sets the "text_value" field. +func (ifuo *ItemFieldUpdateOne) SetTextValue(s string) *ItemFieldUpdateOne { + ifuo.mutation.SetTextValue(s) + return ifuo +} + +// SetNillableTextValue sets the "text_value" field if the given value is not nil. +func (ifuo *ItemFieldUpdateOne) SetNillableTextValue(s *string) *ItemFieldUpdateOne { + if s != nil { + ifuo.SetTextValue(*s) + } + return ifuo +} + +// ClearTextValue clears the value of the "text_value" field. +func (ifuo *ItemFieldUpdateOne) ClearTextValue() *ItemFieldUpdateOne { + ifuo.mutation.ClearTextValue() + return ifuo +} + +// SetNumberValue sets the "number_value" field. +func (ifuo *ItemFieldUpdateOne) SetNumberValue(i int) *ItemFieldUpdateOne { + ifuo.mutation.ResetNumberValue() + ifuo.mutation.SetNumberValue(i) + return ifuo +} + +// SetNillableNumberValue sets the "number_value" field if the given value is not nil. 
+func (ifuo *ItemFieldUpdateOne) SetNillableNumberValue(i *int) *ItemFieldUpdateOne { + if i != nil { + ifuo.SetNumberValue(*i) + } + return ifuo +} + +// AddNumberValue adds i to the "number_value" field. +func (ifuo *ItemFieldUpdateOne) AddNumberValue(i int) *ItemFieldUpdateOne { + ifuo.mutation.AddNumberValue(i) + return ifuo +} + +// ClearNumberValue clears the value of the "number_value" field. +func (ifuo *ItemFieldUpdateOne) ClearNumberValue() *ItemFieldUpdateOne { + ifuo.mutation.ClearNumberValue() + return ifuo +} + +// SetBooleanValue sets the "boolean_value" field. +func (ifuo *ItemFieldUpdateOne) SetBooleanValue(b bool) *ItemFieldUpdateOne { + ifuo.mutation.SetBooleanValue(b) + return ifuo +} + +// SetNillableBooleanValue sets the "boolean_value" field if the given value is not nil. +func (ifuo *ItemFieldUpdateOne) SetNillableBooleanValue(b *bool) *ItemFieldUpdateOne { + if b != nil { + ifuo.SetBooleanValue(*b) + } + return ifuo +} + +// SetTimeValue sets the "time_value" field. +func (ifuo *ItemFieldUpdateOne) SetTimeValue(t time.Time) *ItemFieldUpdateOne { + ifuo.mutation.SetTimeValue(t) + return ifuo +} + +// SetNillableTimeValue sets the "time_value" field if the given value is not nil. +func (ifuo *ItemFieldUpdateOne) SetNillableTimeValue(t *time.Time) *ItemFieldUpdateOne { + if t != nil { + ifuo.SetTimeValue(*t) + } + return ifuo +} + +// SetItemID sets the "item" edge to the Item entity by ID. +func (ifuo *ItemFieldUpdateOne) SetItemID(id uuid.UUID) *ItemFieldUpdateOne { + ifuo.mutation.SetItemID(id) + return ifuo +} + +// SetNillableItemID sets the "item" edge to the Item entity by ID if the given value is not nil. +func (ifuo *ItemFieldUpdateOne) SetNillableItemID(id *uuid.UUID) *ItemFieldUpdateOne { + if id != nil { + ifuo = ifuo.SetItemID(*id) + } + return ifuo +} + +// SetItem sets the "item" edge to the Item entity. 
+func (ifuo *ItemFieldUpdateOne) SetItem(i *Item) *ItemFieldUpdateOne { + return ifuo.SetItemID(i.ID) +} + +// Mutation returns the ItemFieldMutation object of the builder. +func (ifuo *ItemFieldUpdateOne) Mutation() *ItemFieldMutation { + return ifuo.mutation +} + +// ClearItem clears the "item" edge to the Item entity. +func (ifuo *ItemFieldUpdateOne) ClearItem() *ItemFieldUpdateOne { + ifuo.mutation.ClearItem() + return ifuo +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (ifuo *ItemFieldUpdateOne) Select(field string, fields ...string) *ItemFieldUpdateOne { + ifuo.fields = append([]string{field}, fields...) + return ifuo +} + +// Save executes the query and returns the updated ItemField entity. +func (ifuo *ItemFieldUpdateOne) Save(ctx context.Context) (*ItemField, error) { + var ( + err error + node *ItemField + ) + ifuo.defaults() + if len(ifuo.hooks) == 0 { + if err = ifuo.check(); err != nil { + return nil, err + } + node, err = ifuo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*ItemFieldMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = ifuo.check(); err != nil { + return nil, err + } + ifuo.mutation = mutation + node, err = ifuo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(ifuo.hooks) - 1; i >= 0; i-- { + if ifuo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ifuo.hooks[i](mut) + } + v, err := mut.Mutate(ctx, ifuo.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*ItemField) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from ItemFieldMutation", v) + } + node = nv + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. 
+func (ifuo *ItemFieldUpdateOne) SaveX(ctx context.Context) *ItemField { + node, err := ifuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (ifuo *ItemFieldUpdateOne) Exec(ctx context.Context) error { + _, err := ifuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ifuo *ItemFieldUpdateOne) ExecX(ctx context.Context) { + if err := ifuo.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (ifuo *ItemFieldUpdateOne) defaults() { + if _, ok := ifuo.mutation.UpdatedAt(); !ok { + v := itemfield.UpdateDefaultUpdatedAt() + ifuo.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (ifuo *ItemFieldUpdateOne) check() error { + if v, ok := ifuo.mutation.Name(); ok { + if err := itemfield.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "ItemField.name": %w`, err)} + } + } + if v, ok := ifuo.mutation.Description(); ok { + if err := itemfield.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "ItemField.description": %w`, err)} + } + } + if v, ok := ifuo.mutation.GetType(); ok { + if err := itemfield.TypeValidator(v); err != nil { + return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ItemField.type": %w`, err)} + } + } + if v, ok := ifuo.mutation.TextValue(); ok { + if err := itemfield.TextValueValidator(v); err != nil { + return &ValidationError{Name: "text_value", err: fmt.Errorf(`ent: validator failed for field "ItemField.text_value": %w`, err)} + } + } + return nil +} + +func (ifuo *ItemFieldUpdateOne) sqlSave(ctx context.Context) (_node *ItemField, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: itemfield.Table, + Columns: 
itemfield.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: itemfield.FieldID, + }, + }, + } + id, ok := ifuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "ItemField.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := ifuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, itemfield.FieldID) + for _, f := range fields { + if !itemfield.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != itemfield.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := ifuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := ifuo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldUpdatedAt, + }) + } + if value, ok := ifuo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldName, + }) + } + if value, ok := ifuo.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: itemfield.FieldDescription, + }) + } + if ifuo.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: itemfield.FieldDescription, + }) + } + if value, ok := ifuo.mutation.GetType(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeEnum, + Value: value, + Column: itemfield.FieldType, + }) + } + if value, ok := ifuo.mutation.TextValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + 
Value: value, + Column: itemfield.FieldTextValue, + }) + } + if ifuo.mutation.TextValueCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: itemfield.FieldTextValue, + }) + } + if value, ok := ifuo.mutation.NumberValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Value: value, + Column: itemfield.FieldNumberValue, + }) + } + if value, ok := ifuo.mutation.AddedNumberValue(); ok { + _spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Value: value, + Column: itemfield.FieldNumberValue, + }) + } + if ifuo.mutation.NumberValueCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: itemfield.FieldNumberValue, + }) + } + if value, ok := ifuo.mutation.BooleanValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: itemfield.FieldBooleanValue, + }) + } + if value, ok := ifuo.mutation.TimeValue(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: itemfield.FieldTimeValue, + }) + } + if ifuo.mutation.ItemCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: itemfield.ItemTable, + Columns: []string{itemfield.ItemColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := ifuo.mutation.ItemIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: itemfield.ItemTable, + Columns: []string{itemfield.ItemColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + 
edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &ItemField{config: ifuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, ifuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{itemfield.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/ent/label.go b/backend/ent/label.go new file mode 100644 index 0000000..a010753 --- /dev/null +++ b/backend/ent/label.go @@ -0,0 +1,204 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/label" +) + +// Label is the model entity for the Label schema. +type Label struct { + config `json:"-"` + // ID of the ent. + ID uuid.UUID `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` + // Name holds the value of the "name" field. + Name string `json:"name,omitempty"` + // Description holds the value of the "description" field. + Description string `json:"description,omitempty"` + // Color holds the value of the "color" field. + Color string `json:"color,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the LabelQuery when eager-loading is set. + Edges LabelEdges `json:"edges"` + group_labels *uuid.UUID +} + +// LabelEdges holds the relations/edges for other nodes in the graph. +type LabelEdges struct { + // Group holds the value of the group edge. 
+ Group *Group `json:"group,omitempty"` + // Items holds the value of the items edge. + Items []*Item `json:"items,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [2]bool +} + +// GroupOrErr returns the Group value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e LabelEdges) GroupOrErr() (*Group, error) { + if e.loadedTypes[0] { + if e.Group == nil { + // Edge was loaded but was not found. + return nil, &NotFoundError{label: group.Label} + } + return e.Group, nil + } + return nil, &NotLoadedError{edge: "group"} +} + +// ItemsOrErr returns the Items value or an error if the edge +// was not loaded in eager-loading. +func (e LabelEdges) ItemsOrErr() ([]*Item, error) { + if e.loadedTypes[1] { + return e.Items, nil + } + return nil, &NotLoadedError{edge: "items"} +} + +// scanValues returns the types for scanning values from sql.Rows. +func (*Label) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case label.FieldName, label.FieldDescription, label.FieldColor: + values[i] = new(sql.NullString) + case label.FieldCreatedAt, label.FieldUpdatedAt: + values[i] = new(sql.NullTime) + case label.FieldID: + values[i] = new(uuid.UUID) + case label.ForeignKeys[0]: // group_labels + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + default: + return nil, fmt.Errorf("unexpected column %q for type Label", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the Label fields. 
+func (l *Label) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case label.FieldID: + if value, ok := values[i].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field id", values[i]) + } else if value != nil { + l.ID = *value + } + case label.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + l.CreatedAt = value.Time + } + case label.FieldUpdatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[i]) + } else if value.Valid { + l.UpdatedAt = value.Time + } + case label.FieldName: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field name", values[i]) + } else if value.Valid { + l.Name = value.String + } + case label.FieldDescription: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field description", values[i]) + } else if value.Valid { + l.Description = value.String + } + case label.FieldColor: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field color", values[i]) + } else if value.Valid { + l.Color = value.String + } + case label.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field group_labels", values[i]) + } else if value.Valid { + l.group_labels = new(uuid.UUID) + *l.group_labels = *value.S.(*uuid.UUID) + } + } + } + return nil +} + +// QueryGroup queries the "group" edge of the Label entity. +func (l *Label) QueryGroup() *GroupQuery { + return (&LabelClient{config: l.config}).QueryGroup(l) +} + +// QueryItems queries the "items" edge of the Label entity. 
+func (l *Label) QueryItems() *ItemQuery { + return (&LabelClient{config: l.config}).QueryItems(l) +} + +// Update returns a builder for updating this Label. +// Note that you need to call Label.Unwrap() before calling this method if this Label +// was returned from a transaction, and the transaction was committed or rolled back. +func (l *Label) Update() *LabelUpdateOne { + return (&LabelClient{config: l.config}).UpdateOne(l) +} + +// Unwrap unwraps the Label entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (l *Label) Unwrap() *Label { + _tx, ok := l.config.driver.(*txDriver) + if !ok { + panic("ent: Label is not a transactional entity") + } + l.config.driver = _tx.drv + return l +} + +// String implements the fmt.Stringer. +func (l *Label) String() string { + var builder strings.Builder + builder.WriteString("Label(") + builder.WriteString(fmt.Sprintf("id=%v, ", l.ID)) + builder.WriteString("created_at=") + builder.WriteString(l.CreatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("updated_at=") + builder.WriteString(l.UpdatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("name=") + builder.WriteString(l.Name) + builder.WriteString(", ") + builder.WriteString("description=") + builder.WriteString(l.Description) + builder.WriteString(", ") + builder.WriteString("color=") + builder.WriteString(l.Color) + builder.WriteByte(')') + return builder.String() +} + +// Labels is a parsable slice of Label. +type Labels []*Label + +func (l Labels) config(cfg config) { + for _i := range l { + l[_i].config = cfg + } +} diff --git a/backend/ent/label/label.go b/backend/ent/label/label.go new file mode 100644 index 0000000..82bcdbd --- /dev/null +++ b/backend/ent/label/label.go @@ -0,0 +1,98 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package label + +import ( + "time" + + "github.com/google/uuid" +) + +const ( + // Label holds the string label denoting the label type in the database. + Label = "label" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // FieldUpdatedAt holds the string denoting the updated_at field in the database. + FieldUpdatedAt = "updated_at" + // FieldName holds the string denoting the name field in the database. + FieldName = "name" + // FieldDescription holds the string denoting the description field in the database. + FieldDescription = "description" + // FieldColor holds the string denoting the color field in the database. + FieldColor = "color" + // EdgeGroup holds the string denoting the group edge name in mutations. + EdgeGroup = "group" + // EdgeItems holds the string denoting the items edge name in mutations. + EdgeItems = "items" + // Table holds the table name of the label in the database. + Table = "labels" + // GroupTable is the table that holds the group relation/edge. + GroupTable = "labels" + // GroupInverseTable is the table name for the Group entity. + // It exists in this package in order to avoid circular dependency with the "group" package. + GroupInverseTable = "groups" + // GroupColumn is the table column denoting the group relation/edge. + GroupColumn = "group_labels" + // ItemsTable is the table that holds the items relation/edge. The primary key declared below. + ItemsTable = "label_items" + // ItemsInverseTable is the table name for the Item entity. + // It exists in this package in order to avoid circular dependency with the "item" package. + ItemsInverseTable = "items" +) + +// Columns holds all SQL columns for label fields. 
+var Columns = []string{ + FieldID, + FieldCreatedAt, + FieldUpdatedAt, + FieldName, + FieldDescription, + FieldColor, +} + +// ForeignKeys holds the SQL foreign-keys that are owned by the "labels" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "group_labels", +} + +var ( + // ItemsPrimaryKey and ItemsColumn2 are the table columns denoting the + // primary key for the items relation (M2M). + ItemsPrimaryKey = []string{"label_id", "item_id"} +) + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } + return false +} + +var ( + // DefaultCreatedAt holds the default value on creation for the "created_at" field. + DefaultCreatedAt func() time.Time + // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. + DefaultUpdatedAt func() time.Time + // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. + UpdateDefaultUpdatedAt func() time.Time + // NameValidator is a validator for the "name" field. It is called by the builders before save. + NameValidator func(string) error + // DescriptionValidator is a validator for the "description" field. It is called by the builders before save. + DescriptionValidator func(string) error + // ColorValidator is a validator for the "color" field. It is called by the builders before save. + ColorValidator func(string) error + // DefaultID holds the default value on creation for the "id" field. + DefaultID func() uuid.UUID +) diff --git a/backend/ent/label/where.go b/backend/ent/label/where.go new file mode 100644 index 0000000..01d0f2d --- /dev/null +++ b/backend/ent/label/where.go @@ -0,0 +1,659 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package label + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. 
+func IDLTE(id uuid.UUID) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// Name applies equality check predicate on the "name" field. It's identical to NameEQ. +func Name(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ. +func Description(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldDescription), v)) + }) +} + +// Color applies equality check predicate on the "color" field. It's identical to ColorEQ. +func Color(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldColor), v)) + }) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. 
+func CreatedAtIn(vs ...time.Time) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. +func UpdatedAtEQ(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. 
+func UpdatedAtIn(vs ...time.Time) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. +func UpdatedAtNotIn(vs ...time.Time) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. +func UpdatedAtGT(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. +func UpdatedAtGTE(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. +func UpdatedAtLT(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldUpdatedAt), v)) + }) +} + +// NameEQ applies the EQ predicate on the "name" field. +func NameEQ(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// NameNEQ applies the NEQ predicate on the "name" field. +func NameNEQ(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldName), v)) + }) +} + +// NameIn applies the In predicate on the "name" field. 
+func NameIn(vs ...string) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldName), v...)) + }) +} + +// NameNotIn applies the NotIn predicate on the "name" field. +func NameNotIn(vs ...string) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldName), v...)) + }) +} + +// NameGT applies the GT predicate on the "name" field. +func NameGT(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldName), v)) + }) +} + +// NameGTE applies the GTE predicate on the "name" field. +func NameGTE(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldName), v)) + }) +} + +// NameLT applies the LT predicate on the "name" field. +func NameLT(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldName), v)) + }) +} + +// NameLTE applies the LTE predicate on the "name" field. +func NameLTE(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldName), v)) + }) +} + +// NameContains applies the Contains predicate on the "name" field. +func NameContains(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldName), v)) + }) +} + +// NameHasPrefix applies the HasPrefix predicate on the "name" field. +func NameHasPrefix(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldName), v)) + }) +} + +// NameHasSuffix applies the HasSuffix predicate on the "name" field. 
+func NameHasSuffix(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldName), v)) + }) +} + +// NameEqualFold applies the EqualFold predicate on the "name" field. +func NameEqualFold(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldName), v)) + }) +} + +// NameContainsFold applies the ContainsFold predicate on the "name" field. +func NameContainsFold(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldName), v)) + }) +} + +// DescriptionEQ applies the EQ predicate on the "description" field. +func DescriptionEQ(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldDescription), v)) + }) +} + +// DescriptionNEQ applies the NEQ predicate on the "description" field. +func DescriptionNEQ(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldDescription), v)) + }) +} + +// DescriptionIn applies the In predicate on the "description" field. +func DescriptionIn(vs ...string) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldDescription), v...)) + }) +} + +// DescriptionNotIn applies the NotIn predicate on the "description" field. +func DescriptionNotIn(vs ...string) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldDescription), v...)) + }) +} + +// DescriptionGT applies the GT predicate on the "description" field. +func DescriptionGT(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldDescription), v)) + }) +} + +// DescriptionGTE applies the GTE predicate on the "description" field. 
+func DescriptionGTE(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldDescription), v)) + }) +} + +// DescriptionLT applies the LT predicate on the "description" field. +func DescriptionLT(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldDescription), v)) + }) +} + +// DescriptionLTE applies the LTE predicate on the "description" field. +func DescriptionLTE(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldDescription), v)) + }) +} + +// DescriptionContains applies the Contains predicate on the "description" field. +func DescriptionContains(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldDescription), v)) + }) +} + +// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field. +func DescriptionHasPrefix(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldDescription), v)) + }) +} + +// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field. +func DescriptionHasSuffix(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldDescription), v)) + }) +} + +// DescriptionIsNil applies the IsNil predicate on the "description" field. +func DescriptionIsNil() predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldDescription))) + }) +} + +// DescriptionNotNil applies the NotNil predicate on the "description" field. +func DescriptionNotNil() predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldDescription))) + }) +} + +// DescriptionEqualFold applies the EqualFold predicate on the "description" field. 
+func DescriptionEqualFold(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldDescription), v)) + }) +} + +// DescriptionContainsFold applies the ContainsFold predicate on the "description" field. +func DescriptionContainsFold(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldDescription), v)) + }) +} + +// ColorEQ applies the EQ predicate on the "color" field. +func ColorEQ(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldColor), v)) + }) +} + +// ColorNEQ applies the NEQ predicate on the "color" field. +func ColorNEQ(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldColor), v)) + }) +} + +// ColorIn applies the In predicate on the "color" field. +func ColorIn(vs ...string) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldColor), v...)) + }) +} + +// ColorNotIn applies the NotIn predicate on the "color" field. +func ColorNotIn(vs ...string) predicate.Label { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldColor), v...)) + }) +} + +// ColorGT applies the GT predicate on the "color" field. +func ColorGT(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldColor), v)) + }) +} + +// ColorGTE applies the GTE predicate on the "color" field. +func ColorGTE(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldColor), v)) + }) +} + +// ColorLT applies the LT predicate on the "color" field. 
+func ColorLT(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldColor), v)) + }) +} + +// ColorLTE applies the LTE predicate on the "color" field. +func ColorLTE(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldColor), v)) + }) +} + +// ColorContains applies the Contains predicate on the "color" field. +func ColorContains(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldColor), v)) + }) +} + +// ColorHasPrefix applies the HasPrefix predicate on the "color" field. +func ColorHasPrefix(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldColor), v)) + }) +} + +// ColorHasSuffix applies the HasSuffix predicate on the "color" field. +func ColorHasSuffix(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldColor), v)) + }) +} + +// ColorIsNil applies the IsNil predicate on the "color" field. +func ColorIsNil() predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldColor))) + }) +} + +// ColorNotNil applies the NotNil predicate on the "color" field. +func ColorNotNil() predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldColor))) + }) +} + +// ColorEqualFold applies the EqualFold predicate on the "color" field. +func ColorEqualFold(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldColor), v)) + }) +} + +// ColorContainsFold applies the ContainsFold predicate on the "color" field. +func ColorContainsFold(v string) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldColor), v)) + }) +} + +// HasGroup applies the HasEdge predicate on the "group" edge. 
+func HasGroup() predicate.Label { + return predicate.Label(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasGroupWith applies the HasEdge predicate on the "group" edge with a given conditions (other predicates). +func HasGroupWith(preds ...predicate.Group) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasItems applies the HasEdge predicate on the "items" edge. +func HasItems() predicate.Label { + return predicate.Label(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemsTable, FieldID), + sqlgraph.Edge(sqlgraph.M2M, false, ItemsTable, ItemsPrimaryKey...), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasItemsWith applies the HasEdge predicate on the "items" edge with a given conditions (other predicates). +func HasItemsWith(preds ...predicate.Item) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2M, false, ItemsTable, ItemsPrimaryKey...), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. 
+func And(predicates ...predicate.Label) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.Label) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.Label) predicate.Label { + return predicate.Label(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/label_create.go b/backend/ent/label_create.go new file mode 100644 index 0000000..65db3c6 --- /dev/null +++ b/backend/ent/label_create.go @@ -0,0 +1,444 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/label" +) + +// LabelCreate is the builder for creating a Label entity. +type LabelCreate struct { + config + mutation *LabelMutation + hooks []Hook +} + +// SetCreatedAt sets the "created_at" field. +func (lc *LabelCreate) SetCreatedAt(t time.Time) *LabelCreate { + lc.mutation.SetCreatedAt(t) + return lc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (lc *LabelCreate) SetNillableCreatedAt(t *time.Time) *LabelCreate { + if t != nil { + lc.SetCreatedAt(*t) + } + return lc +} + +// SetUpdatedAt sets the "updated_at" field. +func (lc *LabelCreate) SetUpdatedAt(t time.Time) *LabelCreate { + lc.mutation.SetUpdatedAt(t) + return lc +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. 
+func (lc *LabelCreate) SetNillableUpdatedAt(t *time.Time) *LabelCreate { + if t != nil { + lc.SetUpdatedAt(*t) + } + return lc +} + +// SetName sets the "name" field. +func (lc *LabelCreate) SetName(s string) *LabelCreate { + lc.mutation.SetName(s) + return lc +} + +// SetDescription sets the "description" field. +func (lc *LabelCreate) SetDescription(s string) *LabelCreate { + lc.mutation.SetDescription(s) + return lc +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (lc *LabelCreate) SetNillableDescription(s *string) *LabelCreate { + if s != nil { + lc.SetDescription(*s) + } + return lc +} + +// SetColor sets the "color" field. +func (lc *LabelCreate) SetColor(s string) *LabelCreate { + lc.mutation.SetColor(s) + return lc +} + +// SetNillableColor sets the "color" field if the given value is not nil. +func (lc *LabelCreate) SetNillableColor(s *string) *LabelCreate { + if s != nil { + lc.SetColor(*s) + } + return lc +} + +// SetID sets the "id" field. +func (lc *LabelCreate) SetID(u uuid.UUID) *LabelCreate { + lc.mutation.SetID(u) + return lc +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (lc *LabelCreate) SetNillableID(u *uuid.UUID) *LabelCreate { + if u != nil { + lc.SetID(*u) + } + return lc +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (lc *LabelCreate) SetGroupID(id uuid.UUID) *LabelCreate { + lc.mutation.SetGroupID(id) + return lc +} + +// SetGroup sets the "group" edge to the Group entity. +func (lc *LabelCreate) SetGroup(g *Group) *LabelCreate { + return lc.SetGroupID(g.ID) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (lc *LabelCreate) AddItemIDs(ids ...uuid.UUID) *LabelCreate { + lc.mutation.AddItemIDs(ids...) + return lc +} + +// AddItems adds the "items" edges to the Item entity. 
+func (lc *LabelCreate) AddItems(i ...*Item) *LabelCreate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return lc.AddItemIDs(ids...) +} + +// Mutation returns the LabelMutation object of the builder. +func (lc *LabelCreate) Mutation() *LabelMutation { + return lc.mutation +} + +// Save creates the Label in the database. +func (lc *LabelCreate) Save(ctx context.Context) (*Label, error) { + var ( + err error + node *Label + ) + lc.defaults() + if len(lc.hooks) == 0 { + if err = lc.check(); err != nil { + return nil, err + } + node, err = lc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LabelMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = lc.check(); err != nil { + return nil, err + } + lc.mutation = mutation + if node, err = lc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(lc.hooks) - 1; i >= 0; i-- { + if lc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = lc.hooks[i](mut) + } + v, err := mut.Mutate(ctx, lc.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Label) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from LabelMutation", v) + } + node = nv + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. +func (lc *LabelCreate) SaveX(ctx context.Context) *Label { + v, err := lc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (lc *LabelCreate) Exec(ctx context.Context) error { + _, err := lc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (lc *LabelCreate) ExecX(ctx context.Context) { + if err := lc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (lc *LabelCreate) defaults() { + if _, ok := lc.mutation.CreatedAt(); !ok { + v := label.DefaultCreatedAt() + lc.mutation.SetCreatedAt(v) + } + if _, ok := lc.mutation.UpdatedAt(); !ok { + v := label.DefaultUpdatedAt() + lc.mutation.SetUpdatedAt(v) + } + if _, ok := lc.mutation.ID(); !ok { + v := label.DefaultID() + lc.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (lc *LabelCreate) check() error { + if _, ok := lc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Label.created_at"`)} + } + if _, ok := lc.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Label.updated_at"`)} + } + if _, ok := lc.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "Label.name"`)} + } + if v, ok := lc.mutation.Name(); ok { + if err := label.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Label.name": %w`, err)} + } + } + if v, ok := lc.mutation.Description(); ok { + if err := label.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Label.description": %w`, err)} + } + } + if v, ok := lc.mutation.Color(); ok { + if err := label.ColorValidator(v); err != nil { + return &ValidationError{Name: "color", err: fmt.Errorf(`ent: validator failed for field "Label.color": %w`, err)} + } + } + if _, ok := lc.mutation.GroupID(); !ok { + return &ValidationError{Name: "group", err: errors.New(`ent: missing required edge "Label.group"`)} + } + return nil +} + +func (lc *LabelCreate) sqlSave(ctx context.Context) 
(*Label, error) { + _node, _spec := lc.createSpec() + if err := sqlgraph.CreateNode(ctx, lc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (lc *LabelCreate) createSpec() (*Label, *sqlgraph.CreateSpec) { + var ( + _node = &Label{config: lc.config} + _spec = &sqlgraph.CreateSpec{ + Table: label.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + } + ) + if id, ok := lc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := lc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: label.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := lc.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: label.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } + if value, ok := lc.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldName, + }) + _node.Name = value + } + if value, ok := lc.mutation.Description(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldDescription, + }) + _node.Description = value + } + if value, ok := lc.mutation.Color(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldColor, + }) + _node.Color = value + } + if nodes := lc.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: label.GroupTable, + 
Columns: []string{label.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.group_labels = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := lc.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// LabelCreateBulk is the builder for creating many Label entities in bulk. +type LabelCreateBulk struct { + config + builders []*LabelCreate +} + +// Save creates the Label entities in the database. +func (lcb *LabelCreateBulk) Save(ctx context.Context) ([]*Label, error) { + specs := make([]*sqlgraph.CreateSpec, len(lcb.builders)) + nodes := make([]*Label, len(lcb.builders)) + mutators := make([]Mutator, len(lcb.builders)) + for i := range lcb.builders { + func(i int, root context.Context) { + builder := lcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LabelMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, lcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. 
+ if err = sqlgraph.BatchCreate(ctx, lcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, lcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (lcb *LabelCreateBulk) SaveX(ctx context.Context) []*Label { + v, err := lcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (lcb *LabelCreateBulk) Exec(ctx context.Context) error { + _, err := lcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (lcb *LabelCreateBulk) ExecX(ctx context.Context) { + if err := lcb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/label_delete.go b/backend/ent/label_delete.go new file mode 100644 index 0000000..e9eb6bc --- /dev/null +++ b/backend/ent/label_delete.go @@ -0,0 +1,115 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// LabelDelete is the builder for deleting a Label entity. +type LabelDelete struct { + config + hooks []Hook + mutation *LabelMutation +} + +// Where appends a list predicates to the LabelDelete builder. +func (ld *LabelDelete) Where(ps ...predicate.Label) *LabelDelete { + ld.mutation.Where(ps...) + return ld +} + +// Exec executes the deletion query and returns how many vertices were deleted. 
+func (ld *LabelDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(ld.hooks) == 0 { + affected, err = ld.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LabelMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + ld.mutation = mutation + affected, err = ld.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(ld.hooks) - 1; i >= 0; i-- { + if ld.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ld.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, ld.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ld *LabelDelete) ExecX(ctx context.Context) int { + n, err := ld.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (ld *LabelDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: label.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + if ps := ld.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, ld.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return affected, err +} + +// LabelDeleteOne is the builder for deleting a single Label entity. +type LabelDeleteOne struct { + ld *LabelDelete +} + +// Exec executes the deletion query. +func (ldo *LabelDeleteOne) Exec(ctx context.Context) error { + n, err := ldo.ld.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{label.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (ldo *LabelDeleteOne) ExecX(ctx context.Context) { + ldo.ld.ExecX(ctx) +} diff --git a/backend/ent/label_query.go b/backend/ent/label_query.go new file mode 100644 index 0000000..b54aa58 --- /dev/null +++ b/backend/ent/label_query.go @@ -0,0 +1,714 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "database/sql/driver" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// LabelQuery is the builder for querying Label entities. +type LabelQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.Label + withGroup *GroupQuery + withItems *ItemQuery + withFKs bool + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the LabelQuery builder. +func (lq *LabelQuery) Where(ps ...predicate.Label) *LabelQuery { + lq.predicates = append(lq.predicates, ps...) + return lq +} + +// Limit adds a limit step to the query. +func (lq *LabelQuery) Limit(limit int) *LabelQuery { + lq.limit = &limit + return lq +} + +// Offset adds an offset step to the query. +func (lq *LabelQuery) Offset(offset int) *LabelQuery { + lq.offset = &offset + return lq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (lq *LabelQuery) Unique(unique bool) *LabelQuery { + lq.unique = &unique + return lq +} + +// Order adds an order step to the query. +func (lq *LabelQuery) Order(o ...OrderFunc) *LabelQuery { + lq.order = append(lq.order, o...) 
+ return lq +} + +// QueryGroup chains the current query on the "group" edge. +func (lq *LabelQuery) QueryGroup() *GroupQuery { + query := &GroupQuery{config: lq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := lq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(label.Table, label.FieldID, selector), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, label.GroupTable, label.GroupColumn), + ) + fromU = sqlgraph.SetNeighbors(lq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryItems chains the current query on the "items" edge. +func (lq *LabelQuery) QueryItems() *ItemQuery { + query := &ItemQuery{config: lq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := lq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(label.Table, label.FieldID, selector), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.M2M, false, label.ItemsTable, label.ItemsPrimaryKey...), + ) + fromU = sqlgraph.SetNeighbors(lq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first Label entity from the query. +// Returns a *NotFoundError when no Label was found. +func (lq *LabelQuery) First(ctx context.Context) (*Label, error) { + nodes, err := lq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{label.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. 
+func (lq *LabelQuery) FirstX(ctx context.Context) *Label { + node, err := lq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first Label ID from the query. +// Returns a *NotFoundError when no Label ID was found. +func (lq *LabelQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = lq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{label.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (lq *LabelQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := lq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single Label entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one Label entity is found. +// Returns a *NotFoundError when no Label entities are found. +func (lq *LabelQuery) Only(ctx context.Context) (*Label, error) { + nodes, err := lq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{label.Label} + default: + return nil, &NotSingularError{label.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (lq *LabelQuery) OnlyX(ctx context.Context) *Label { + node, err := lq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only Label ID in the query. +// Returns a *NotSingularError when more than one Label ID is found. +// Returns a *NotFoundError when no entities are found. 
+func (lq *LabelQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = lq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{label.Label} + default: + err = &NotSingularError{label.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. +func (lq *LabelQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := lq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of Labels. +func (lq *LabelQuery) All(ctx context.Context) ([]*Label, error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + return lq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (lq *LabelQuery) AllX(ctx context.Context) []*Label { + nodes, err := lq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of Label IDs. +func (lq *LabelQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := lq.Select(label.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (lq *LabelQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := lq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (lq *LabelQuery) Count(ctx context.Context) (int, error) { + if err := lq.prepareQuery(ctx); err != nil { + return 0, err + } + return lq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (lq *LabelQuery) CountX(ctx context.Context) int { + count, err := lq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. 
+func (lq *LabelQuery) Exist(ctx context.Context) (bool, error) { + if err := lq.prepareQuery(ctx); err != nil { + return false, err + } + return lq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. +func (lq *LabelQuery) ExistX(ctx context.Context) bool { + exist, err := lq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the LabelQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (lq *LabelQuery) Clone() *LabelQuery { + if lq == nil { + return nil + } + return &LabelQuery{ + config: lq.config, + limit: lq.limit, + offset: lq.offset, + order: append([]OrderFunc{}, lq.order...), + predicates: append([]predicate.Label{}, lq.predicates...), + withGroup: lq.withGroup.Clone(), + withItems: lq.withItems.Clone(), + // clone intermediate query. + sql: lq.sql.Clone(), + path: lq.path, + unique: lq.unique, + } +} + +// WithGroup tells the query-builder to eager-load the nodes that are connected to +// the "group" edge. The optional arguments are used to configure the query builder of the edge. +func (lq *LabelQuery) WithGroup(opts ...func(*GroupQuery)) *LabelQuery { + query := &GroupQuery{config: lq.config} + for _, opt := range opts { + opt(query) + } + lq.withGroup = query + return lq +} + +// WithItems tells the query-builder to eager-load the nodes that are connected to +// the "items" edge. The optional arguments are used to configure the query builder of the edge. +func (lq *LabelQuery) WithItems(opts ...func(*ItemQuery)) *LabelQuery { + query := &ItemQuery{config: lq.config} + for _, opt := range opts { + opt(query) + } + lq.withItems = query + return lq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. 
+// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.Label.Query(). +// GroupBy(label.FieldCreatedAt). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (lq *LabelQuery) GroupBy(field string, fields ...string) *LabelGroupBy { + grbuild := &LabelGroupBy{config: lq.config} + grbuild.fields = append([]string{field}, fields...) + grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + return lq.sqlQuery(ctx), nil + } + grbuild.label = label.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.Label.Query(). +// Select(label.FieldCreatedAt). +// Scan(ctx, &v) +func (lq *LabelQuery) Select(fields ...string) *LabelSelect { + lq.fields = append(lq.fields, fields...) 
+ selbuild := &LabelSelect{LabelQuery: lq} + selbuild.label = label.Label + selbuild.flds, selbuild.scan = &lq.fields, selbuild.Scan + return selbuild +} + +func (lq *LabelQuery) prepareQuery(ctx context.Context) error { + for _, f := range lq.fields { + if !label.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if lq.path != nil { + prev, err := lq.path(ctx) + if err != nil { + return err + } + lq.sql = prev + } + return nil +} + +func (lq *LabelQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Label, error) { + var ( + nodes = []*Label{} + withFKs = lq.withFKs + _spec = lq.querySpec() + loadedTypes = [2]bool{ + lq.withGroup != nil, + lq.withItems != nil, + } + ) + if lq.withGroup != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, label.ForeignKeys...) + } + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + return (*Label).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + node := &Label{config: lq.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, lq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := lq.withGroup; query != nil { + if err := lq.loadGroup(ctx, query, nodes, nil, + func(n *Label, e *Group) { n.Edges.Group = e }); err != nil { + return nil, err + } + } + if query := lq.withItems; query != nil { + if err := lq.loadItems(ctx, query, nodes, + func(n *Label) { n.Edges.Items = []*Item{} }, + func(n *Label, e *Item) { n.Edges.Items = append(n.Edges.Items, e) }); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (lq *LabelQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []*Label, init func(*Label), assign func(*Label, 
*Group)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*Label) + for i := range nodes { + if nodes[i].group_labels == nil { + continue + } + fk := *nodes[i].group_labels + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(group.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_labels" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} +func (lq *LabelQuery) loadItems(ctx context.Context, query *ItemQuery, nodes []*Label, init func(*Label), assign func(*Label, *Item)) error { + edgeIDs := make([]driver.Value, len(nodes)) + byID := make(map[uuid.UUID]*Label) + nids := make(map[uuid.UUID]map[*Label]struct{}) + for i, node := range nodes { + edgeIDs[i] = node.ID + byID[node.ID] = node + if init != nil { + init(node) + } + } + query.Where(func(s *sql.Selector) { + joinT := sql.Table(label.ItemsTable) + s.Join(joinT).On(s.C(item.FieldID), joinT.C(label.ItemsPrimaryKey[1])) + s.Where(sql.InValues(joinT.C(label.ItemsPrimaryKey[0]), edgeIDs...)) + columns := s.SelectedColumns() + s.Select(joinT.C(label.ItemsPrimaryKey[0])) + s.AppendSelect(columns...) 
+ s.SetDistinct(false) + }) + if err := query.prepareQuery(ctx); err != nil { + return err + } + neighbors, err := query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { + assign := spec.Assign + values := spec.ScanValues + spec.ScanValues = func(columns []string) ([]interface{}, error) { + values, err := values(columns[1:]) + if err != nil { + return nil, err + } + return append([]interface{}{new(uuid.UUID)}, values...), nil + } + spec.Assign = func(columns []string, values []interface{}) error { + outValue := *values[0].(*uuid.UUID) + inValue := *values[1].(*uuid.UUID) + if nids[inValue] == nil { + nids[inValue] = map[*Label]struct{}{byID[outValue]: struct{}{}} + return assign(columns[1:], values[1:]) + } + nids[inValue][byID[outValue]] = struct{}{} + return nil + } + }) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nids[n.ID] + if !ok { + return fmt.Errorf(`unexpected "items" node returned %v`, n.ID) + } + for kn := range nodes { + assign(kn, n) + } + } + return nil +} + +func (lq *LabelQuery) sqlCount(ctx context.Context) (int, error) { + _spec := lq.querySpec() + _spec.Node.Columns = lq.fields + if len(lq.fields) > 0 { + _spec.Unique = lq.unique != nil && *lq.unique + } + return sqlgraph.CountNodes(ctx, lq.driver, _spec) +} + +func (lq *LabelQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := lq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (lq *LabelQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: label.Table, + Columns: label.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + From: lq.sql, + Unique: true, + } + if unique := lq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := lq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = 
append(_spec.Node.Columns, label.FieldID) + for i := range fields { + if fields[i] != label.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := lq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := lq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := lq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := lq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (lq *LabelQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(lq.driver.Dialect()) + t1 := builder.Table(label.Table) + columns := lq.fields + if len(columns) == 0 { + columns = label.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if lq.sql != nil { + selector = lq.sql + selector.Select(selector.Columns(columns...)...) + } + if lq.unique != nil && *lq.unique { + selector.Distinct() + } + for _, p := range lq.predicates { + p(selector) + } + for _, p := range lq.order { + p(selector) + } + if offset := lq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := lq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// LabelGroupBy is the group-by builder for Label entities. +type LabelGroupBy struct { + config + selector + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (lgb *LabelGroupBy) Aggregate(fns ...AggregateFunc) *LabelGroupBy { + lgb.fns = append(lgb.fns, fns...) 
+ return lgb +} + +// Scan applies the group-by query and scans the result into the given value. +func (lgb *LabelGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := lgb.path(ctx) + if err != nil { + return err + } + lgb.sql = query + return lgb.sqlScan(ctx, v) +} + +func (lgb *LabelGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range lgb.fields { + if !label.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := lgb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := lgb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (lgb *LabelGroupBy) sqlQuery() *sql.Selector { + selector := lgb.sql.Select() + aggregation := make([]string, 0, len(lgb.fns)) + for _, fn := range lgb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(lgb.fields)+len(lgb.fns)) + for _, f := range lgb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(lgb.fields...)...) +} + +// LabelSelect is the builder for selecting fields of Label entities. +type LabelSelect struct { + *LabelQuery + selector + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. 
+func (ls *LabelSelect) Scan(ctx context.Context, v interface{}) error { + if err := ls.prepareQuery(ctx); err != nil { + return err + } + ls.sql = ls.LabelQuery.sqlQuery(ctx) + return ls.sqlScan(ctx, v) +} + +func (ls *LabelSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := ls.sql.Query() + if err := ls.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/label_update.go b/backend/ent/label_update.go new file mode 100644 index 0000000..8135a56 --- /dev/null +++ b/backend/ent/label_update.go @@ -0,0 +1,793 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// LabelUpdate is the builder for updating Label entities. +type LabelUpdate struct { + config + hooks []Hook + mutation *LabelMutation +} + +// Where appends a list predicates to the LabelUpdate builder. +func (lu *LabelUpdate) Where(ps ...predicate.Label) *LabelUpdate { + lu.mutation.Where(ps...) + return lu +} + +// SetUpdatedAt sets the "updated_at" field. +func (lu *LabelUpdate) SetUpdatedAt(t time.Time) *LabelUpdate { + lu.mutation.SetUpdatedAt(t) + return lu +} + +// SetName sets the "name" field. +func (lu *LabelUpdate) SetName(s string) *LabelUpdate { + lu.mutation.SetName(s) + return lu +} + +// SetDescription sets the "description" field. +func (lu *LabelUpdate) SetDescription(s string) *LabelUpdate { + lu.mutation.SetDescription(s) + return lu +} + +// SetNillableDescription sets the "description" field if the given value is not nil. 
+func (lu *LabelUpdate) SetNillableDescription(s *string) *LabelUpdate { + if s != nil { + lu.SetDescription(*s) + } + return lu +} + +// ClearDescription clears the value of the "description" field. +func (lu *LabelUpdate) ClearDescription() *LabelUpdate { + lu.mutation.ClearDescription() + return lu +} + +// SetColor sets the "color" field. +func (lu *LabelUpdate) SetColor(s string) *LabelUpdate { + lu.mutation.SetColor(s) + return lu +} + +// SetNillableColor sets the "color" field if the given value is not nil. +func (lu *LabelUpdate) SetNillableColor(s *string) *LabelUpdate { + if s != nil { + lu.SetColor(*s) + } + return lu +} + +// ClearColor clears the value of the "color" field. +func (lu *LabelUpdate) ClearColor() *LabelUpdate { + lu.mutation.ClearColor() + return lu +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (lu *LabelUpdate) SetGroupID(id uuid.UUID) *LabelUpdate { + lu.mutation.SetGroupID(id) + return lu +} + +// SetGroup sets the "group" edge to the Group entity. +func (lu *LabelUpdate) SetGroup(g *Group) *LabelUpdate { + return lu.SetGroupID(g.ID) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (lu *LabelUpdate) AddItemIDs(ids ...uuid.UUID) *LabelUpdate { + lu.mutation.AddItemIDs(ids...) + return lu +} + +// AddItems adds the "items" edges to the Item entity. +func (lu *LabelUpdate) AddItems(i ...*Item) *LabelUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return lu.AddItemIDs(ids...) +} + +// Mutation returns the LabelMutation object of the builder. +func (lu *LabelUpdate) Mutation() *LabelMutation { + return lu.mutation +} + +// ClearGroup clears the "group" edge to the Group entity. +func (lu *LabelUpdate) ClearGroup() *LabelUpdate { + lu.mutation.ClearGroup() + return lu +} + +// ClearItems clears all "items" edges to the Item entity. 
+func (lu *LabelUpdate) ClearItems() *LabelUpdate { + lu.mutation.ClearItems() + return lu +} + +// RemoveItemIDs removes the "items" edge to Item entities by IDs. +func (lu *LabelUpdate) RemoveItemIDs(ids ...uuid.UUID) *LabelUpdate { + lu.mutation.RemoveItemIDs(ids...) + return lu +} + +// RemoveItems removes "items" edges to Item entities. +func (lu *LabelUpdate) RemoveItems(i ...*Item) *LabelUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return lu.RemoveItemIDs(ids...) +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (lu *LabelUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + lu.defaults() + if len(lu.hooks) == 0 { + if err = lu.check(); err != nil { + return 0, err + } + affected, err = lu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LabelMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = lu.check(); err != nil { + return 0, err + } + lu.mutation = mutation + affected, err = lu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(lu.hooks) - 1; i >= 0; i-- { + if lu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = lu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, lu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (lu *LabelUpdate) SaveX(ctx context.Context) int { + affected, err := lu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (lu *LabelUpdate) Exec(ctx context.Context) error { + _, err := lu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (lu *LabelUpdate) ExecX(ctx context.Context) { + if err := lu.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (lu *LabelUpdate) defaults() { + if _, ok := lu.mutation.UpdatedAt(); !ok { + v := label.UpdateDefaultUpdatedAt() + lu.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (lu *LabelUpdate) check() error { + if v, ok := lu.mutation.Name(); ok { + if err := label.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Label.name": %w`, err)} + } + } + if v, ok := lu.mutation.Description(); ok { + if err := label.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Label.description": %w`, err)} + } + } + if v, ok := lu.mutation.Color(); ok { + if err := label.ColorValidator(v); err != nil { + return &ValidationError{Name: "color", err: fmt.Errorf(`ent: validator failed for field "Label.color": %w`, err)} + } + } + if _, ok := lu.mutation.GroupID(); lu.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "Label.group"`) + } + return nil +} + +func (lu *LabelUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: label.Table, + Columns: label.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + if ps := lu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := lu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: label.FieldUpdatedAt, + }) + } + if value, ok := lu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, 
&sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldName, + }) + } + if value, ok := lu.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldDescription, + }) + } + if lu.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: label.FieldDescription, + }) + } + if value, ok := lu.mutation.Color(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldColor, + }) + } + if lu.mutation.ColorCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: label.FieldColor, + }) + } + if lu.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: label.GroupTable, + Columns: []string{label.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := lu.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: label.GroupTable, + Columns: []string{label.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if lu.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear 
= append(_spec.Edges.Clear, edge) + } + if nodes := lu.mutation.RemovedItemsIDs(); len(nodes) > 0 && !lu.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := lu.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, lu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{label.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + return n, nil +} + +// LabelUpdateOne is the builder for updating a single Label entity. +type LabelUpdateOne struct { + config + fields []string + hooks []Hook + mutation *LabelMutation +} + +// SetUpdatedAt sets the "updated_at" field. +func (luo *LabelUpdateOne) SetUpdatedAt(t time.Time) *LabelUpdateOne { + luo.mutation.SetUpdatedAt(t) + return luo +} + +// SetName sets the "name" field. +func (luo *LabelUpdateOne) SetName(s string) *LabelUpdateOne { + luo.mutation.SetName(s) + return luo +} + +// SetDescription sets the "description" field. 
+func (luo *LabelUpdateOne) SetDescription(s string) *LabelUpdateOne { + luo.mutation.SetDescription(s) + return luo +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (luo *LabelUpdateOne) SetNillableDescription(s *string) *LabelUpdateOne { + if s != nil { + luo.SetDescription(*s) + } + return luo +} + +// ClearDescription clears the value of the "description" field. +func (luo *LabelUpdateOne) ClearDescription() *LabelUpdateOne { + luo.mutation.ClearDescription() + return luo +} + +// SetColor sets the "color" field. +func (luo *LabelUpdateOne) SetColor(s string) *LabelUpdateOne { + luo.mutation.SetColor(s) + return luo +} + +// SetNillableColor sets the "color" field if the given value is not nil. +func (luo *LabelUpdateOne) SetNillableColor(s *string) *LabelUpdateOne { + if s != nil { + luo.SetColor(*s) + } + return luo +} + +// ClearColor clears the value of the "color" field. +func (luo *LabelUpdateOne) ClearColor() *LabelUpdateOne { + luo.mutation.ClearColor() + return luo +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (luo *LabelUpdateOne) SetGroupID(id uuid.UUID) *LabelUpdateOne { + luo.mutation.SetGroupID(id) + return luo +} + +// SetGroup sets the "group" edge to the Group entity. +func (luo *LabelUpdateOne) SetGroup(g *Group) *LabelUpdateOne { + return luo.SetGroupID(g.ID) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (luo *LabelUpdateOne) AddItemIDs(ids ...uuid.UUID) *LabelUpdateOne { + luo.mutation.AddItemIDs(ids...) + return luo +} + +// AddItems adds the "items" edges to the Item entity. +func (luo *LabelUpdateOne) AddItems(i ...*Item) *LabelUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return luo.AddItemIDs(ids...) +} + +// Mutation returns the LabelMutation object of the builder. 
+func (luo *LabelUpdateOne) Mutation() *LabelMutation { + return luo.mutation +} + +// ClearGroup clears the "group" edge to the Group entity. +func (luo *LabelUpdateOne) ClearGroup() *LabelUpdateOne { + luo.mutation.ClearGroup() + return luo +} + +// ClearItems clears all "items" edges to the Item entity. +func (luo *LabelUpdateOne) ClearItems() *LabelUpdateOne { + luo.mutation.ClearItems() + return luo +} + +// RemoveItemIDs removes the "items" edge to Item entities by IDs. +func (luo *LabelUpdateOne) RemoveItemIDs(ids ...uuid.UUID) *LabelUpdateOne { + luo.mutation.RemoveItemIDs(ids...) + return luo +} + +// RemoveItems removes "items" edges to Item entities. +func (luo *LabelUpdateOne) RemoveItems(i ...*Item) *LabelUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return luo.RemoveItemIDs(ids...) +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (luo *LabelUpdateOne) Select(field string, fields ...string) *LabelUpdateOne { + luo.fields = append([]string{field}, fields...) + return luo +} + +// Save executes the query and returns the updated Label entity. 
+func (luo *LabelUpdateOne) Save(ctx context.Context) (*Label, error) { + var ( + err error + node *Label + ) + luo.defaults() + if len(luo.hooks) == 0 { + if err = luo.check(); err != nil { + return nil, err + } + node, err = luo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LabelMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = luo.check(); err != nil { + return nil, err + } + luo.mutation = mutation + node, err = luo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(luo.hooks) - 1; i >= 0; i-- { + if luo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = luo.hooks[i](mut) + } + v, err := mut.Mutate(ctx, luo.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Label) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from LabelMutation", v) + } + node = nv + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. +func (luo *LabelUpdateOne) SaveX(ctx context.Context) *Label { + node, err := luo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (luo *LabelUpdateOne) Exec(ctx context.Context) error { + _, err := luo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (luo *LabelUpdateOne) ExecX(ctx context.Context) { + if err := luo.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (luo *LabelUpdateOne) defaults() { + if _, ok := luo.mutation.UpdatedAt(); !ok { + v := label.UpdateDefaultUpdatedAt() + luo.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (luo *LabelUpdateOne) check() error { + if v, ok := luo.mutation.Name(); ok { + if err := label.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Label.name": %w`, err)} + } + } + if v, ok := luo.mutation.Description(); ok { + if err := label.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Label.description": %w`, err)} + } + } + if v, ok := luo.mutation.Color(); ok { + if err := label.ColorValidator(v); err != nil { + return &ValidationError{Name: "color", err: fmt.Errorf(`ent: validator failed for field "Label.color": %w`, err)} + } + } + if _, ok := luo.mutation.GroupID(); luo.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "Label.group"`) + } + return nil +} + +func (luo *LabelUpdateOne) sqlSave(ctx context.Context) (_node *Label, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: label.Table, + Columns: label.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: label.FieldID, + }, + }, + } + id, ok := luo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Label.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := luo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, label.FieldID) + for _, f := range fields { + if !label.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != label.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := luo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := luo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, 
&sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: label.FieldUpdatedAt, + }) + } + if value, ok := luo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldName, + }) + } + if value, ok := luo.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldDescription, + }) + } + if luo.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: label.FieldDescription, + }) + } + if value, ok := luo.mutation.Color(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: label.FieldColor, + }) + } + if luo.mutation.ColorCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: label.FieldColor, + }) + } + if luo.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: label.GroupTable, + Columns: []string{label.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := luo.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: label.GroupTable, + Columns: []string{label.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if luo.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: 
label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := luo.mutation.RemovedItemsIDs(); len(nodes) > 0 && !luo.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := luo.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2M, + Inverse: false, + Table: label.ItemsTable, + Columns: label.ItemsPrimaryKey, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &Label{config: luo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, luo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{label.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/ent/location.go b/backend/ent/location.go new file mode 100644 index 0000000..4222152 --- /dev/null +++ b/backend/ent/location.go @@ -0,0 +1,193 @@ +// Code generated by ent, DO NOT EDIT. 
+ +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/location" +) + +// Location is the model entity for the Location schema. +type Location struct { + config `json:"-"` + // ID of the ent. + ID uuid.UUID `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` + // Name holds the value of the "name" field. + Name string `json:"name,omitempty"` + // Description holds the value of the "description" field. + Description string `json:"description,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the LocationQuery when eager-loading is set. + Edges LocationEdges `json:"edges"` + group_locations *uuid.UUID +} + +// LocationEdges holds the relations/edges for other nodes in the graph. +type LocationEdges struct { + // Group holds the value of the group edge. + Group *Group `json:"group,omitempty"` + // Items holds the value of the items edge. + Items []*Item `json:"items,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [2]bool +} + +// GroupOrErr returns the Group value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e LocationEdges) GroupOrErr() (*Group, error) { + if e.loadedTypes[0] { + if e.Group == nil { + // Edge was loaded but was not found. + return nil, &NotFoundError{label: group.Label} + } + return e.Group, nil + } + return nil, &NotLoadedError{edge: "group"} +} + +// ItemsOrErr returns the Items value or an error if the edge +// was not loaded in eager-loading. 
+func (e LocationEdges) ItemsOrErr() ([]*Item, error) { + if e.loadedTypes[1] { + return e.Items, nil + } + return nil, &NotLoadedError{edge: "items"} +} + +// scanValues returns the types for scanning values from sql.Rows. +func (*Location) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case location.FieldName, location.FieldDescription: + values[i] = new(sql.NullString) + case location.FieldCreatedAt, location.FieldUpdatedAt: + values[i] = new(sql.NullTime) + case location.FieldID: + values[i] = new(uuid.UUID) + case location.ForeignKeys[0]: // group_locations + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + default: + return nil, fmt.Errorf("unexpected column %q for type Location", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the Location fields. +func (l *Location) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case location.FieldID: + if value, ok := values[i].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field id", values[i]) + } else if value != nil { + l.ID = *value + } + case location.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + l.CreatedAt = value.Time + } + case location.FieldUpdatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[i]) + } else if value.Valid { + l.UpdatedAt = value.Time + } + case location.FieldName: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field name", values[i]) + } else if value.Valid 
{ + l.Name = value.String + } + case location.FieldDescription: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field description", values[i]) + } else if value.Valid { + l.Description = value.String + } + case location.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field group_locations", values[i]) + } else if value.Valid { + l.group_locations = new(uuid.UUID) + *l.group_locations = *value.S.(*uuid.UUID) + } + } + } + return nil +} + +// QueryGroup queries the "group" edge of the Location entity. +func (l *Location) QueryGroup() *GroupQuery { + return (&LocationClient{config: l.config}).QueryGroup(l) +} + +// QueryItems queries the "items" edge of the Location entity. +func (l *Location) QueryItems() *ItemQuery { + return (&LocationClient{config: l.config}).QueryItems(l) +} + +// Update returns a builder for updating this Location. +// Note that you need to call Location.Unwrap() before calling this method if this Location +// was returned from a transaction, and the transaction was committed or rolled back. +func (l *Location) Update() *LocationUpdateOne { + return (&LocationClient{config: l.config}).UpdateOne(l) +} + +// Unwrap unwraps the Location entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (l *Location) Unwrap() *Location { + _tx, ok := l.config.driver.(*txDriver) + if !ok { + panic("ent: Location is not a transactional entity") + } + l.config.driver = _tx.drv + return l +} + +// String implements the fmt.Stringer. 
+func (l *Location) String() string {
+	var builder strings.Builder
+	builder.WriteString("Location(")
+	builder.WriteString(fmt.Sprintf("id=%v, ", l.ID))
+	builder.WriteString("created_at=")
+	builder.WriteString(l.CreatedAt.Format(time.ANSIC))
+	builder.WriteString(", ")
+	builder.WriteString("updated_at=")
+	builder.WriteString(l.UpdatedAt.Format(time.ANSIC))
+	builder.WriteString(", ")
+	builder.WriteString("name=")
+	builder.WriteString(l.Name)
+	builder.WriteString(", ")
+	builder.WriteString("description=")
+	builder.WriteString(l.Description)
+	builder.WriteByte(')')
+	return builder.String()
+}
+
+// Locations is a parsable slice of Location.
+type Locations []*Location
+
+// config propagates the ent config to every element of the slice so that
+// edge-traversal queries on the returned entities share the same driver.
+func (l Locations) config(cfg config) {
+	for _i := range l {
+		l[_i].config = cfg
+	}
+}
diff --git a/backend/ent/location/location.go b/backend/ent/location/location.go
new file mode 100644
index 0000000..322658e
--- /dev/null
+++ b/backend/ent/location/location.go
@@ -0,0 +1,89 @@
+// Code generated by ent, DO NOT EDIT.
+// NOTE(review): generated file — hand edits will be lost on the next `go generate`;
+// change the schema under backend/ent/schema and regenerate instead.
+
+package location
+
+import (
+	"time"
+
+	"github.com/google/uuid"
+)
+
+const (
+	// Label holds the string label denoting the location type in the database.
+	Label = "location"
+	// FieldID holds the string denoting the id field in the database.
+	FieldID = "id"
+	// FieldCreatedAt holds the string denoting the created_at field in the database.
+	FieldCreatedAt = "created_at"
+	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
+	FieldUpdatedAt = "updated_at"
+	// FieldName holds the string denoting the name field in the database.
+	FieldName = "name"
+	// FieldDescription holds the string denoting the description field in the database.
+	FieldDescription = "description"
+	// EdgeGroup holds the string denoting the group edge name in mutations.
+	EdgeGroup = "group"
+	// EdgeItems holds the string denoting the items edge name in mutations.
+	EdgeItems = "items"
+	// Table holds the table name of the location in the database.
+	Table = "locations"
+	// GroupTable is the table that holds the group relation/edge.
+	GroupTable = "locations"
+	// GroupInverseTable is the table name for the Group entity.
+	// It exists in this package in order to avoid circular dependency with the "group" package.
+	GroupInverseTable = "groups"
+	// GroupColumn is the table column denoting the group relation/edge.
+	GroupColumn = "group_locations"
+	// ItemsTable is the table that holds the items relation/edge.
+	ItemsTable = "items"
+	// ItemsInverseTable is the table name for the Item entity.
+	// It exists in this package in order to avoid circular dependency with the "item" package.
+	ItemsInverseTable = "items"
+	// ItemsColumn is the table column denoting the items relation/edge.
+	ItemsColumn = "location_items"
+)
+
+// Columns holds all SQL columns for location fields.
+var Columns = []string{
+	FieldID,
+	FieldCreatedAt,
+	FieldUpdatedAt,
+	FieldName,
+	FieldDescription,
+}
+
+// ForeignKeys holds the SQL foreign-keys that are owned by the "locations"
+// table and are not defined as standalone fields in the schema.
+var ForeignKeys = []string{
+	"group_locations",
+}
+
+// ValidColumn reports if the column name is valid (part of the table columns).
+// Both schema fields and edge-owned foreign keys count as valid.
+func ValidColumn(column string) bool {
+	for i := range Columns {
+		if column == Columns[i] {
+			return true
+		}
+	}
+	for i := range ForeignKeys {
+		if column == ForeignKeys[i] {
+			return true
+		}
+	}
+	return false
+}
+
+var (
+	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
+	DefaultCreatedAt func() time.Time
+	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
+	DefaultUpdatedAt func() time.Time
+	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
+	UpdateDefaultUpdatedAt func() time.Time
+	// NameValidator is a validator for the "name" field. It is called by the builders before save.
+	NameValidator func(string) error
+	// DescriptionValidator is a validator for the "description" field. It is called by the builders before save.
+	DescriptionValidator func(string) error
+	// DefaultID holds the default value on creation for the "id" field.
+	DefaultID func() uuid.UUID
+)
diff --git a/backend/ent/location/where.go b/backend/ent/location/where.go
new file mode 100644
index 0000000..625a038
--- /dev/null
+++ b/backend/ent/location/where.go
@@ -0,0 +1,539 @@
+// Code generated by ent, DO NOT EDIT.
+
+package location
+
+import (
+	"time"
+
+	"entgo.io/ent/dialect/sql"
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"github.com/google/uuid"
+	"github.com/hay-kot/content/backend/ent/predicate"
+)
+
+// ID filters vertices based on their ID field.
+func ID(id uuid.UUID) predicate.Location {
+	return predicate.Location(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldID), id))
+	})
+}
+
+// IDEQ applies the EQ predicate on the ID field.
+func IDEQ(id uuid.UUID) predicate.Location {
+	return predicate.Location(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldID), id))
+	})
+}
+
+// IDNEQ applies the NEQ predicate on the ID field.
+func IDNEQ(id uuid.UUID) predicate.Location {
+	return predicate.Location(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldID), id))
+	})
+}
+
+// IDIn applies the In predicate on the ID field.
+func IDIn(ids ...uuid.UUID) predicate.Location {
+	return predicate.Location(func(s *sql.Selector) {
+		v := make([]interface{}, len(ids))
+		for i := range v {
+			v[i] = ids[i]
+		}
+		s.Where(sql.In(s.C(FieldID), v...))
+	})
+}
+
+// IDNotIn applies the NotIn predicate on the ID field.
+func IDNotIn(ids ...uuid.UUID) predicate.Location {
+	return predicate.Location(func(s *sql.Selector) {
+		v := make([]interface{}, len(ids))
+		for i := range v {
+			v[i] = ids[i]
+		}
+		s.Where(sql.NotIn(s.C(FieldID), v...))
+	})
+}
+
+// IDGT applies the GT predicate on the ID field.
+func IDGT(id uuid.UUID) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id uuid.UUID) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id uuid.UUID) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id uuid.UUID) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// Name applies equality check predicate on the "name" field. It's identical to NameEQ. +func Name(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ. +func Description(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldDescription), v)) + }) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. 
+func CreatedAtEQ(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. 
+func UpdatedAtEQ(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. +func UpdatedAtIn(vs ...time.Time) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. +func UpdatedAtNotIn(vs ...time.Time) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. +func UpdatedAtGT(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. +func UpdatedAtGTE(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. +func UpdatedAtLT(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldUpdatedAt), v)) + }) +} + +// NameEQ applies the EQ predicate on the "name" field. 
+func NameEQ(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// NameNEQ applies the NEQ predicate on the "name" field. +func NameNEQ(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldName), v)) + }) +} + +// NameIn applies the In predicate on the "name" field. +func NameIn(vs ...string) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldName), v...)) + }) +} + +// NameNotIn applies the NotIn predicate on the "name" field. +func NameNotIn(vs ...string) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldName), v...)) + }) +} + +// NameGT applies the GT predicate on the "name" field. +func NameGT(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldName), v)) + }) +} + +// NameGTE applies the GTE predicate on the "name" field. +func NameGTE(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldName), v)) + }) +} + +// NameLT applies the LT predicate on the "name" field. +func NameLT(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldName), v)) + }) +} + +// NameLTE applies the LTE predicate on the "name" field. +func NameLTE(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldName), v)) + }) +} + +// NameContains applies the Contains predicate on the "name" field. 
+func NameContains(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldName), v)) + }) +} + +// NameHasPrefix applies the HasPrefix predicate on the "name" field. +func NameHasPrefix(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldName), v)) + }) +} + +// NameHasSuffix applies the HasSuffix predicate on the "name" field. +func NameHasSuffix(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldName), v)) + }) +} + +// NameEqualFold applies the EqualFold predicate on the "name" field. +func NameEqualFold(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldName), v)) + }) +} + +// NameContainsFold applies the ContainsFold predicate on the "name" field. +func NameContainsFold(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldName), v)) + }) +} + +// DescriptionEQ applies the EQ predicate on the "description" field. +func DescriptionEQ(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldDescription), v)) + }) +} + +// DescriptionNEQ applies the NEQ predicate on the "description" field. +func DescriptionNEQ(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldDescription), v)) + }) +} + +// DescriptionIn applies the In predicate on the "description" field. +func DescriptionIn(vs ...string) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldDescription), v...)) + }) +} + +// DescriptionNotIn applies the NotIn predicate on the "description" field. 
+func DescriptionNotIn(vs ...string) predicate.Location { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldDescription), v...)) + }) +} + +// DescriptionGT applies the GT predicate on the "description" field. +func DescriptionGT(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldDescription), v)) + }) +} + +// DescriptionGTE applies the GTE predicate on the "description" field. +func DescriptionGTE(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldDescription), v)) + }) +} + +// DescriptionLT applies the LT predicate on the "description" field. +func DescriptionLT(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldDescription), v)) + }) +} + +// DescriptionLTE applies the LTE predicate on the "description" field. +func DescriptionLTE(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldDescription), v)) + }) +} + +// DescriptionContains applies the Contains predicate on the "description" field. +func DescriptionContains(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldDescription), v)) + }) +} + +// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field. +func DescriptionHasPrefix(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldDescription), v)) + }) +} + +// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field. 
+func DescriptionHasSuffix(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldDescription), v)) + }) +} + +// DescriptionIsNil applies the IsNil predicate on the "description" field. +func DescriptionIsNil() predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldDescription))) + }) +} + +// DescriptionNotNil applies the NotNil predicate on the "description" field. +func DescriptionNotNil() predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldDescription))) + }) +} + +// DescriptionEqualFold applies the EqualFold predicate on the "description" field. +func DescriptionEqualFold(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldDescription), v)) + }) +} + +// DescriptionContainsFold applies the ContainsFold predicate on the "description" field. +func DescriptionContainsFold(v string) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldDescription), v)) + }) +} + +// HasGroup applies the HasEdge predicate on the "group" edge. +func HasGroup() predicate.Location { + return predicate.Location(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasGroupWith applies the HasEdge predicate on the "group" edge with a given conditions (other predicates). 
+func HasGroupWith(preds ...predicate.Group) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasItems applies the HasEdge predicate on the "items" edge. +func HasItems() predicate.Location { + return predicate.Location(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemsTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, ItemsTable, ItemsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasItemsWith applies the HasEdge predicate on the "items" edge with a given conditions (other predicates). +func HasItemsWith(preds ...predicate.Item) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(ItemsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, ItemsTable, ItemsColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.Location) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.Location) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. 
+func Not(p predicate.Location) predicate.Location { + return predicate.Location(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/location_create.go b/backend/ent/location_create.go new file mode 100644 index 0000000..a468ce9 --- /dev/null +++ b/backend/ent/location_create.go @@ -0,0 +1,417 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/location" +) + +// LocationCreate is the builder for creating a Location entity. +type LocationCreate struct { + config + mutation *LocationMutation + hooks []Hook +} + +// SetCreatedAt sets the "created_at" field. +func (lc *LocationCreate) SetCreatedAt(t time.Time) *LocationCreate { + lc.mutation.SetCreatedAt(t) + return lc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (lc *LocationCreate) SetNillableCreatedAt(t *time.Time) *LocationCreate { + if t != nil { + lc.SetCreatedAt(*t) + } + return lc +} + +// SetUpdatedAt sets the "updated_at" field. +func (lc *LocationCreate) SetUpdatedAt(t time.Time) *LocationCreate { + lc.mutation.SetUpdatedAt(t) + return lc +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (lc *LocationCreate) SetNillableUpdatedAt(t *time.Time) *LocationCreate { + if t != nil { + lc.SetUpdatedAt(*t) + } + return lc +} + +// SetName sets the "name" field. +func (lc *LocationCreate) SetName(s string) *LocationCreate { + lc.mutation.SetName(s) + return lc +} + +// SetDescription sets the "description" field. +func (lc *LocationCreate) SetDescription(s string) *LocationCreate { + lc.mutation.SetDescription(s) + return lc +} + +// SetNillableDescription sets the "description" field if the given value is not nil. 
+func (lc *LocationCreate) SetNillableDescription(s *string) *LocationCreate { + if s != nil { + lc.SetDescription(*s) + } + return lc +} + +// SetID sets the "id" field. +func (lc *LocationCreate) SetID(u uuid.UUID) *LocationCreate { + lc.mutation.SetID(u) + return lc +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (lc *LocationCreate) SetNillableID(u *uuid.UUID) *LocationCreate { + if u != nil { + lc.SetID(*u) + } + return lc +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (lc *LocationCreate) SetGroupID(id uuid.UUID) *LocationCreate { + lc.mutation.SetGroupID(id) + return lc +} + +// SetGroup sets the "group" edge to the Group entity. +func (lc *LocationCreate) SetGroup(g *Group) *LocationCreate { + return lc.SetGroupID(g.ID) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (lc *LocationCreate) AddItemIDs(ids ...uuid.UUID) *LocationCreate { + lc.mutation.AddItemIDs(ids...) + return lc +} + +// AddItems adds the "items" edges to the Item entity. +func (lc *LocationCreate) AddItems(i ...*Item) *LocationCreate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return lc.AddItemIDs(ids...) +} + +// Mutation returns the LocationMutation object of the builder. +func (lc *LocationCreate) Mutation() *LocationMutation { + return lc.mutation +} + +// Save creates the Location in the database. 
+func (lc *LocationCreate) Save(ctx context.Context) (*Location, error) {
+	var (
+		err  error
+		node *Location
+	)
+	// Populate created_at/updated_at/id defaults before validation.
+	lc.defaults()
+	if len(lc.hooks) == 0 {
+		// Fast path: no hooks registered, validate and persist directly.
+		if err = lc.check(); err != nil {
+			return nil, err
+		}
+		node, err = lc.sqlSave(ctx)
+	} else {
+		// Hooked path: wrap the actual save in a Mutator so registered
+		// hooks can run around it.
+		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+			mutation, ok := m.(*LocationMutation)
+			if !ok {
+				return nil, fmt.Errorf("unexpected mutation type %T", m)
+			}
+			if err = lc.check(); err != nil {
+				return nil, err
+			}
+			lc.mutation = mutation
+			if node, err = lc.sqlSave(ctx); err != nil {
+				return nil, err
+			}
+			mutation.id = &node.ID
+			mutation.done = true
+			return node, err
+		})
+		// Wrap in reverse registration order so that hooks[0] ends up
+		// outermost and therefore executes first.
+		for i := len(lc.hooks) - 1; i >= 0; i-- {
+			if lc.hooks[i] == nil {
+				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
+			}
+			mut = lc.hooks[i](mut)
+		}
+		v, err := mut.Mutate(ctx, lc.mutation)
+		if err != nil {
+			return nil, err
+		}
+		nv, ok := v.(*Location)
+		if !ok {
+			return nil, fmt.Errorf("unexpected node type %T returned from LocationMutation", v)
+		}
+		node = nv
+	}
+	return node, err
+}
+
+// SaveX calls Save and panics if Save returns an error.
+func (lc *LocationCreate) SaveX(ctx context.Context) *Location {
+	v, err := lc.Save(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return v
+}
+
+// Exec executes the query.
+func (lc *LocationCreate) Exec(ctx context.Context) error {
+	_, err := lc.Save(ctx)
+	return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (lc *LocationCreate) ExecX(ctx context.Context) {
+	if err := lc.Exec(ctx); err != nil {
+		panic(err)
+	}
+}
+
+// defaults sets the default values of the builder before save.
+func (lc *LocationCreate) defaults() { + if _, ok := lc.mutation.CreatedAt(); !ok { + v := location.DefaultCreatedAt() + lc.mutation.SetCreatedAt(v) + } + if _, ok := lc.mutation.UpdatedAt(); !ok { + v := location.DefaultUpdatedAt() + lc.mutation.SetUpdatedAt(v) + } + if _, ok := lc.mutation.ID(); !ok { + v := location.DefaultID() + lc.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (lc *LocationCreate) check() error { + if _, ok := lc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Location.created_at"`)} + } + if _, ok := lc.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Location.updated_at"`)} + } + if _, ok := lc.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "Location.name"`)} + } + if v, ok := lc.mutation.Name(); ok { + if err := location.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Location.name": %w`, err)} + } + } + if v, ok := lc.mutation.Description(); ok { + if err := location.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Location.description": %w`, err)} + } + } + if _, ok := lc.mutation.GroupID(); !ok { + return &ValidationError{Name: "group", err: errors.New(`ent: missing required edge "Location.group"`)} + } + return nil +} + +func (lc *LocationCreate) sqlSave(ctx context.Context) (*Location, error) { + _node, _spec := lc.createSpec() + if err := sqlgraph.CreateNode(ctx, lc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := 
_node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (lc *LocationCreate) createSpec() (*Location, *sqlgraph.CreateSpec) { + var ( + _node = &Location{config: lc.config} + _spec = &sqlgraph.CreateSpec{ + Table: location.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + } + ) + if id, ok := lc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := lc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: location.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := lc.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: location.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } + if value, ok := lc.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: location.FieldName, + }) + _node.Name = value + } + if value, ok := lc.mutation.Description(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: location.FieldDescription, + }) + _node.Description = value + } + if nodes := lc.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: location.GroupTable, + Columns: []string{location.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.group_locations = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := lc.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: 
[]string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// LocationCreateBulk is the builder for creating many Location entities in bulk. +type LocationCreateBulk struct { + config + builders []*LocationCreate +} + +// Save creates the Location entities in the database. +func (lcb *LocationCreateBulk) Save(ctx context.Context) ([]*Location, error) { + specs := make([]*sqlgraph.CreateSpec, len(lcb.builders)) + nodes := make([]*Location, len(lcb.builders)) + mutators := make([]Mutator, len(lcb.builders)) + for i := range lcb.builders { + func(i int, root context.Context) { + builder := lcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LocationMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, lcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. 
+ if err = sqlgraph.BatchCreate(ctx, lcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, lcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (lcb *LocationCreateBulk) SaveX(ctx context.Context) []*Location { + v, err := lcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (lcb *LocationCreateBulk) Exec(ctx context.Context) error { + _, err := lcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (lcb *LocationCreateBulk) ExecX(ctx context.Context) { + if err := lcb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/location_delete.go b/backend/ent/location_delete.go new file mode 100644 index 0000000..5ec513b --- /dev/null +++ b/backend/ent/location_delete.go @@ -0,0 +1,115 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// LocationDelete is the builder for deleting a Location entity. +type LocationDelete struct { + config + hooks []Hook + mutation *LocationMutation +} + +// Where appends a list predicates to the LocationDelete builder. +func (ld *LocationDelete) Where(ps ...predicate.Location) *LocationDelete { + ld.mutation.Where(ps...) + return ld +} + +// Exec executes the deletion query and returns how many vertices were deleted. 
+func (ld *LocationDelete) Exec(ctx context.Context) (int, error) {
+	var (
+		err      error
+		affected int
+	)
+	if len(ld.hooks) == 0 {
+		// Fast path: no hooks registered, run the DELETE directly.
+		affected, err = ld.sqlExec(ctx)
+	} else {
+		// Hooked path: wrap the deletion in a Mutator so registered
+		// hooks can run around it.
+		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+			mutation, ok := m.(*LocationMutation)
+			if !ok {
+				return nil, fmt.Errorf("unexpected mutation type %T", m)
+			}
+			ld.mutation = mutation
+			affected, err = ld.sqlExec(ctx)
+			mutation.done = true
+			return affected, err
+		})
+		// Wrap in reverse registration order so hooks[0] runs first.
+		for i := len(ld.hooks) - 1; i >= 0; i-- {
+			if ld.hooks[i] == nil {
+				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
+			}
+			mut = ld.hooks[i](mut)
+		}
+		if _, err := mut.Mutate(ctx, ld.mutation); err != nil {
+			return 0, err
+		}
+	}
+	return affected, err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (ld *LocationDelete) ExecX(ctx context.Context) int {
+	n, err := ld.Exec(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return n
+}
+
+// sqlExec builds the DELETE spec (table, id column, and any collected
+// predicates) and runs it, returning the number of rows removed.
+func (ld *LocationDelete) sqlExec(ctx context.Context) (int, error) {
+	_spec := &sqlgraph.DeleteSpec{
+		Node: &sqlgraph.NodeSpec{
+			Table: location.Table,
+			ID: &sqlgraph.FieldSpec{
+				Type:   field.TypeUUID,
+				Column: location.FieldID,
+			},
+		},
+	}
+	if ps := ld.mutation.predicates; len(ps) > 0 {
+		// Apply every predicate accumulated via Where to the statement.
+		_spec.Predicate = func(selector *sql.Selector) {
+			for i := range ps {
+				ps[i](selector)
+			}
+		}
+	}
+	affected, err := sqlgraph.DeleteNodes(ctx, ld.driver, _spec)
+	if err != nil && sqlgraph.IsConstraintError(err) {
+		err = &ConstraintError{msg: err.Error(), wrap: err}
+	}
+	return affected, err
+}
+
+// LocationDeleteOne is the builder for deleting a single Location entity.
+type LocationDeleteOne struct {
+	ld *LocationDelete
+}
+
+// Exec executes the deletion query.
+func (ldo *LocationDeleteOne) Exec(ctx context.Context) error { + n, err := ldo.ld.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{location.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (ldo *LocationDeleteOne) ExecX(ctx context.Context) { + ldo.ld.ExecX(ctx) +} diff --git a/backend/ent/location_query.go b/backend/ent/location_query.go new file mode 100644 index 0000000..447ed9f --- /dev/null +++ b/backend/ent/location_query.go @@ -0,0 +1,687 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "database/sql/driver" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// LocationQuery is the builder for querying Location entities. +type LocationQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.Location + withGroup *GroupQuery + withItems *ItemQuery + withFKs bool + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the LocationQuery builder. +func (lq *LocationQuery) Where(ps ...predicate.Location) *LocationQuery { + lq.predicates = append(lq.predicates, ps...) + return lq +} + +// Limit adds a limit step to the query. +func (lq *LocationQuery) Limit(limit int) *LocationQuery { + lq.limit = &limit + return lq +} + +// Offset adds an offset step to the query. +func (lq *LocationQuery) Offset(offset int) *LocationQuery { + lq.offset = &offset + return lq +} + +// Unique configures the query builder to filter duplicate records on query. 
+// By default, unique is set to true, and can be disabled using this method. +func (lq *LocationQuery) Unique(unique bool) *LocationQuery { + lq.unique = &unique + return lq +} + +// Order adds an order step to the query. +func (lq *LocationQuery) Order(o ...OrderFunc) *LocationQuery { + lq.order = append(lq.order, o...) + return lq +} + +// QueryGroup chains the current query on the "group" edge. +func (lq *LocationQuery) QueryGroup() *GroupQuery { + query := &GroupQuery{config: lq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := lq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(location.Table, location.FieldID, selector), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, location.GroupTable, location.GroupColumn), + ) + fromU = sqlgraph.SetNeighbors(lq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryItems chains the current query on the "items" edge. +func (lq *LocationQuery) QueryItems() *ItemQuery { + query := &ItemQuery{config: lq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := lq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(location.Table, location.FieldID, selector), + sqlgraph.To(item.Table, item.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, location.ItemsTable, location.ItemsColumn), + ) + fromU = sqlgraph.SetNeighbors(lq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first Location entity from the query. +// Returns a *NotFoundError when no Location was found. 
+func (lq *LocationQuery) First(ctx context.Context) (*Location, error) { + nodes, err := lq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{location.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (lq *LocationQuery) FirstX(ctx context.Context) *Location { + node, err := lq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first Location ID from the query. +// Returns a *NotFoundError when no Location ID was found. +func (lq *LocationQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = lq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{location.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (lq *LocationQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := lq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single Location entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one Location entity is found. +// Returns a *NotFoundError when no Location entities are found. +func (lq *LocationQuery) Only(ctx context.Context) (*Location, error) { + nodes, err := lq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{location.Label} + default: + return nil, &NotSingularError{location.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (lq *LocationQuery) OnlyX(ctx context.Context) *Location { + node, err := lq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only Location ID in the query. 
+// Returns a *NotSingularError when more than one Location ID is found. +// Returns a *NotFoundError when no entities are found. +func (lq *LocationQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = lq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{location.Label} + default: + err = &NotSingularError{location.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. +func (lq *LocationQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := lq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of Locations. +func (lq *LocationQuery) All(ctx context.Context) ([]*Location, error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + return lq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (lq *LocationQuery) AllX(ctx context.Context) []*Location { + nodes, err := lq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of Location IDs. +func (lq *LocationQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := lq.Select(location.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (lq *LocationQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := lq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (lq *LocationQuery) Count(ctx context.Context) (int, error) { + if err := lq.prepareQuery(ctx); err != nil { + return 0, err + } + return lq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. 
+func (lq *LocationQuery) CountX(ctx context.Context) int { + count, err := lq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (lq *LocationQuery) Exist(ctx context.Context) (bool, error) { + if err := lq.prepareQuery(ctx); err != nil { + return false, err + } + return lq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. +func (lq *LocationQuery) ExistX(ctx context.Context) bool { + exist, err := lq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the LocationQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (lq *LocationQuery) Clone() *LocationQuery { + if lq == nil { + return nil + } + return &LocationQuery{ + config: lq.config, + limit: lq.limit, + offset: lq.offset, + order: append([]OrderFunc{}, lq.order...), + predicates: append([]predicate.Location{}, lq.predicates...), + withGroup: lq.withGroup.Clone(), + withItems: lq.withItems.Clone(), + // clone intermediate query. + sql: lq.sql.Clone(), + path: lq.path, + unique: lq.unique, + } +} + +// WithGroup tells the query-builder to eager-load the nodes that are connected to +// the "group" edge. The optional arguments are used to configure the query builder of the edge. +func (lq *LocationQuery) WithGroup(opts ...func(*GroupQuery)) *LocationQuery { + query := &GroupQuery{config: lq.config} + for _, opt := range opts { + opt(query) + } + lq.withGroup = query + return lq +} + +// WithItems tells the query-builder to eager-load the nodes that are connected to +// the "items" edge. The optional arguments are used to configure the query builder of the edge. 
+func (lq *LocationQuery) WithItems(opts ...func(*ItemQuery)) *LocationQuery { + query := &ItemQuery{config: lq.config} + for _, opt := range opts { + opt(query) + } + lq.withItems = query + return lq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.Location.Query(). +// GroupBy(location.FieldCreatedAt). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (lq *LocationQuery) GroupBy(field string, fields ...string) *LocationGroupBy { + grbuild := &LocationGroupBy{config: lq.config} + grbuild.fields = append([]string{field}, fields...) + grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := lq.prepareQuery(ctx); err != nil { + return nil, err + } + return lq.sqlQuery(ctx), nil + } + grbuild.label = location.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.Location.Query(). +// Select(location.FieldCreatedAt). +// Scan(ctx, &v) +func (lq *LocationQuery) Select(fields ...string) *LocationSelect { + lq.fields = append(lq.fields, fields...) 
+ selbuild := &LocationSelect{LocationQuery: lq} + selbuild.label = location.Label + selbuild.flds, selbuild.scan = &lq.fields, selbuild.Scan + return selbuild +} + +func (lq *LocationQuery) prepareQuery(ctx context.Context) error { + for _, f := range lq.fields { + if !location.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if lq.path != nil { + prev, err := lq.path(ctx) + if err != nil { + return err + } + lq.sql = prev + } + return nil +} + +func (lq *LocationQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Location, error) { + var ( + nodes = []*Location{} + withFKs = lq.withFKs + _spec = lq.querySpec() + loadedTypes = [2]bool{ + lq.withGroup != nil, + lq.withItems != nil, + } + ) + if lq.withGroup != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, location.ForeignKeys...) + } + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + return (*Location).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + node := &Location{config: lq.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, lq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := lq.withGroup; query != nil { + if err := lq.loadGroup(ctx, query, nodes, nil, + func(n *Location, e *Group) { n.Edges.Group = e }); err != nil { + return nil, err + } + } + if query := lq.withItems; query != nil { + if err := lq.loadItems(ctx, query, nodes, + func(n *Location) { n.Edges.Items = []*Item{} }, + func(n *Location, e *Item) { n.Edges.Items = append(n.Edges.Items, e) }); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (lq *LocationQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes 
[]*Location, init func(*Location), assign func(*Location, *Group)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*Location) + for i := range nodes { + if nodes[i].group_locations == nil { + continue + } + fk := *nodes[i].group_locations + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(group.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_locations" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} +func (lq *LocationQuery) loadItems(ctx context.Context, query *ItemQuery, nodes []*Location, init func(*Location), assign func(*Location, *Item)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*Location) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.Item(func(s *sql.Selector) { + s.Where(sql.InValues(location.ItemsColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.location_items + if fk == nil { + return fmt.Errorf(`foreign-key "location_items" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "location_items" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} + +func (lq *LocationQuery) sqlCount(ctx context.Context) (int, error) { + _spec := lq.querySpec() + _spec.Node.Columns = lq.fields + if len(lq.fields) > 0 { + _spec.Unique = lq.unique != nil && *lq.unique + } + return sqlgraph.CountNodes(ctx, lq.driver, _spec) +} + +func (lq *LocationQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := 
lq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (lq *LocationQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: location.Table, + Columns: location.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + From: lq.sql, + Unique: true, + } + if unique := lq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := lq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, location.FieldID) + for i := range fields { + if fields[i] != location.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := lq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := lq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := lq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := lq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (lq *LocationQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(lq.driver.Dialect()) + t1 := builder.Table(location.Table) + columns := lq.fields + if len(columns) == 0 { + columns = location.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if lq.sql != nil { + selector = lq.sql + selector.Select(selector.Columns(columns...)...) + } + if lq.unique != nil && *lq.unique { + selector.Distinct() + } + for _, p := range lq.predicates { + p(selector) + } + for _, p := range lq.order { + p(selector) + } + if offset := lq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. 
+ selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := lq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// LocationGroupBy is the group-by builder for Location entities. +type LocationGroupBy struct { + config + selector + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (lgb *LocationGroupBy) Aggregate(fns ...AggregateFunc) *LocationGroupBy { + lgb.fns = append(lgb.fns, fns...) + return lgb +} + +// Scan applies the group-by query and scans the result into the given value. +func (lgb *LocationGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := lgb.path(ctx) + if err != nil { + return err + } + lgb.sql = query + return lgb.sqlScan(ctx, v) +} + +func (lgb *LocationGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range lgb.fields { + if !location.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := lgb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := lgb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (lgb *LocationGroupBy) sqlQuery() *sql.Selector { + selector := lgb.sql.Select() + aggregation := make([]string, 0, len(lgb.fns)) + for _, fn := range lgb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. 
+ if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(lgb.fields)+len(lgb.fns)) + for _, f := range lgb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(lgb.fields...)...) +} + +// LocationSelect is the builder for selecting fields of Location entities. +type LocationSelect struct { + *LocationQuery + selector + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. +func (ls *LocationSelect) Scan(ctx context.Context, v interface{}) error { + if err := ls.prepareQuery(ctx); err != nil { + return err + } + ls.sql = ls.LocationQuery.sqlQuery(ctx) + return ls.sqlScan(ctx, v) +} + +func (ls *LocationSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := ls.sql.Query() + if err := ls.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/location_update.go b/backend/ent/location_update.go new file mode 100644 index 0000000..0506558 --- /dev/null +++ b/backend/ent/location_update.go @@ -0,0 +1,717 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/location" + "github.com/hay-kot/content/backend/ent/predicate" +) + +// LocationUpdate is the builder for updating Location entities. +type LocationUpdate struct { + config + hooks []Hook + mutation *LocationMutation +} + +// Where appends a list predicates to the LocationUpdate builder. 
+func (lu *LocationUpdate) Where(ps ...predicate.Location) *LocationUpdate { + lu.mutation.Where(ps...) + return lu +} + +// SetUpdatedAt sets the "updated_at" field. +func (lu *LocationUpdate) SetUpdatedAt(t time.Time) *LocationUpdate { + lu.mutation.SetUpdatedAt(t) + return lu +} + +// SetName sets the "name" field. +func (lu *LocationUpdate) SetName(s string) *LocationUpdate { + lu.mutation.SetName(s) + return lu +} + +// SetDescription sets the "description" field. +func (lu *LocationUpdate) SetDescription(s string) *LocationUpdate { + lu.mutation.SetDescription(s) + return lu +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (lu *LocationUpdate) SetNillableDescription(s *string) *LocationUpdate { + if s != nil { + lu.SetDescription(*s) + } + return lu +} + +// ClearDescription clears the value of the "description" field. +func (lu *LocationUpdate) ClearDescription() *LocationUpdate { + lu.mutation.ClearDescription() + return lu +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (lu *LocationUpdate) SetGroupID(id uuid.UUID) *LocationUpdate { + lu.mutation.SetGroupID(id) + return lu +} + +// SetGroup sets the "group" edge to the Group entity. +func (lu *LocationUpdate) SetGroup(g *Group) *LocationUpdate { + return lu.SetGroupID(g.ID) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (lu *LocationUpdate) AddItemIDs(ids ...uuid.UUID) *LocationUpdate { + lu.mutation.AddItemIDs(ids...) + return lu +} + +// AddItems adds the "items" edges to the Item entity. +func (lu *LocationUpdate) AddItems(i ...*Item) *LocationUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return lu.AddItemIDs(ids...) +} + +// Mutation returns the LocationMutation object of the builder. +func (lu *LocationUpdate) Mutation() *LocationMutation { + return lu.mutation +} + +// ClearGroup clears the "group" edge to the Group entity. 
+func (lu *LocationUpdate) ClearGroup() *LocationUpdate { + lu.mutation.ClearGroup() + return lu +} + +// ClearItems clears all "items" edges to the Item entity. +func (lu *LocationUpdate) ClearItems() *LocationUpdate { + lu.mutation.ClearItems() + return lu +} + +// RemoveItemIDs removes the "items" edge to Item entities by IDs. +func (lu *LocationUpdate) RemoveItemIDs(ids ...uuid.UUID) *LocationUpdate { + lu.mutation.RemoveItemIDs(ids...) + return lu +} + +// RemoveItems removes "items" edges to Item entities. +func (lu *LocationUpdate) RemoveItems(i ...*Item) *LocationUpdate { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return lu.RemoveItemIDs(ids...) +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (lu *LocationUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + lu.defaults() + if len(lu.hooks) == 0 { + if err = lu.check(); err != nil { + return 0, err + } + affected, err = lu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LocationMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = lu.check(); err != nil { + return 0, err + } + lu.mutation = mutation + affected, err = lu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(lu.hooks) - 1; i >= 0; i-- { + if lu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = lu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, lu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (lu *LocationUpdate) SaveX(ctx context.Context) int { + affected, err := lu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. 
+func (lu *LocationUpdate) Exec(ctx context.Context) error { + _, err := lu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (lu *LocationUpdate) ExecX(ctx context.Context) { + if err := lu.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (lu *LocationUpdate) defaults() { + if _, ok := lu.mutation.UpdatedAt(); !ok { + v := location.UpdateDefaultUpdatedAt() + lu.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (lu *LocationUpdate) check() error { + if v, ok := lu.mutation.Name(); ok { + if err := location.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Location.name": %w`, err)} + } + } + if v, ok := lu.mutation.Description(); ok { + if err := location.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Location.description": %w`, err)} + } + } + if _, ok := lu.mutation.GroupID(); lu.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "Location.group"`) + } + return nil +} + +func (lu *LocationUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: location.Table, + Columns: location.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + if ps := lu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := lu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: location.FieldUpdatedAt, + }) + } + if value, ok := lu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + 
Type: field.TypeString, + Value: value, + Column: location.FieldName, + }) + } + if value, ok := lu.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: location.FieldDescription, + }) + } + if lu.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: location.FieldDescription, + }) + } + if lu.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: location.GroupTable, + Columns: []string{location.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := lu.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: location.GroupTable, + Columns: []string{location.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if lu.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: []string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := lu.mutation.RemovedItemsIDs(); len(nodes) > 0 && !lu.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: []string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: 
&sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := lu.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: []string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, lu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{location.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + return n, nil +} + +// LocationUpdateOne is the builder for updating a single Location entity. +type LocationUpdateOne struct { + config + fields []string + hooks []Hook + mutation *LocationMutation +} + +// SetUpdatedAt sets the "updated_at" field. +func (luo *LocationUpdateOne) SetUpdatedAt(t time.Time) *LocationUpdateOne { + luo.mutation.SetUpdatedAt(t) + return luo +} + +// SetName sets the "name" field. +func (luo *LocationUpdateOne) SetName(s string) *LocationUpdateOne { + luo.mutation.SetName(s) + return luo +} + +// SetDescription sets the "description" field. +func (luo *LocationUpdateOne) SetDescription(s string) *LocationUpdateOne { + luo.mutation.SetDescription(s) + return luo +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (luo *LocationUpdateOne) SetNillableDescription(s *string) *LocationUpdateOne { + if s != nil { + luo.SetDescription(*s) + } + return luo +} + +// ClearDescription clears the value of the "description" field. 
+func (luo *LocationUpdateOne) ClearDescription() *LocationUpdateOne { + luo.mutation.ClearDescription() + return luo +} + +// SetGroupID sets the "group" edge to the Group entity by ID. +func (luo *LocationUpdateOne) SetGroupID(id uuid.UUID) *LocationUpdateOne { + luo.mutation.SetGroupID(id) + return luo +} + +// SetGroup sets the "group" edge to the Group entity. +func (luo *LocationUpdateOne) SetGroup(g *Group) *LocationUpdateOne { + return luo.SetGroupID(g.ID) +} + +// AddItemIDs adds the "items" edge to the Item entity by IDs. +func (luo *LocationUpdateOne) AddItemIDs(ids ...uuid.UUID) *LocationUpdateOne { + luo.mutation.AddItemIDs(ids...) + return luo +} + +// AddItems adds the "items" edges to the Item entity. +func (luo *LocationUpdateOne) AddItems(i ...*Item) *LocationUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return luo.AddItemIDs(ids...) +} + +// Mutation returns the LocationMutation object of the builder. +func (luo *LocationUpdateOne) Mutation() *LocationMutation { + return luo.mutation +} + +// ClearGroup clears the "group" edge to the Group entity. +func (luo *LocationUpdateOne) ClearGroup() *LocationUpdateOne { + luo.mutation.ClearGroup() + return luo +} + +// ClearItems clears all "items" edges to the Item entity. +func (luo *LocationUpdateOne) ClearItems() *LocationUpdateOne { + luo.mutation.ClearItems() + return luo +} + +// RemoveItemIDs removes the "items" edge to Item entities by IDs. +func (luo *LocationUpdateOne) RemoveItemIDs(ids ...uuid.UUID) *LocationUpdateOne { + luo.mutation.RemoveItemIDs(ids...) + return luo +} + +// RemoveItems removes "items" edges to Item entities. +func (luo *LocationUpdateOne) RemoveItems(i ...*Item) *LocationUpdateOne { + ids := make([]uuid.UUID, len(i)) + for j := range i { + ids[j] = i[j].ID + } + return luo.RemoveItemIDs(ids...) +} + +// Select allows selecting one or more fields (columns) of the returned entity. 
+// The default is selecting all fields defined in the entity schema. +func (luo *LocationUpdateOne) Select(field string, fields ...string) *LocationUpdateOne { + luo.fields = append([]string{field}, fields...) + return luo +} + +// Save executes the query and returns the updated Location entity. +func (luo *LocationUpdateOne) Save(ctx context.Context) (*Location, error) { + var ( + err error + node *Location + ) + luo.defaults() + if len(luo.hooks) == 0 { + if err = luo.check(); err != nil { + return nil, err + } + node, err = luo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*LocationMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = luo.check(); err != nil { + return nil, err + } + luo.mutation = mutation + node, err = luo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(luo.hooks) - 1; i >= 0; i-- { + if luo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = luo.hooks[i](mut) + } + v, err := mut.Mutate(ctx, luo.mutation) + if err != nil { + return nil, err + } + nv, ok := v.(*Location) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from LocationMutation", v) + } + node = nv + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. +func (luo *LocationUpdateOne) SaveX(ctx context.Context) *Location { + node, err := luo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (luo *LocationUpdateOne) Exec(ctx context.Context) error { + _, err := luo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (luo *LocationUpdateOne) ExecX(ctx context.Context) { + if err := luo.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. 
+func (luo *LocationUpdateOne) defaults() { + if _, ok := luo.mutation.UpdatedAt(); !ok { + v := location.UpdateDefaultUpdatedAt() + luo.mutation.SetUpdatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (luo *LocationUpdateOne) check() error { + if v, ok := luo.mutation.Name(); ok { + if err := location.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "Location.name": %w`, err)} + } + } + if v, ok := luo.mutation.Description(); ok { + if err := location.DescriptionValidator(v); err != nil { + return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Location.description": %w`, err)} + } + } + if _, ok := luo.mutation.GroupID(); luo.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "Location.group"`) + } + return nil +} + +func (luo *LocationUpdateOne) sqlSave(ctx context.Context) (_node *Location, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: location.Table, + Columns: location.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: location.FieldID, + }, + }, + } + id, ok := luo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Location.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := luo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, location.FieldID) + for _, f := range fields { + if !location.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != location.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := luo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := 
luo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: location.FieldUpdatedAt, + }) + } + if value, ok := luo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: location.FieldName, + }) + } + if value, ok := luo.mutation.Description(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: location.FieldDescription, + }) + } + if luo.mutation.DescriptionCleared() { + _spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Column: location.FieldDescription, + }) + } + if luo.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: location.GroupTable, + Columns: []string{location.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := luo.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: location.GroupTable, + Columns: []string{location.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if luo.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: []string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes 
:= luo.mutation.RemovedItemsIDs(); len(nodes) > 0 && !luo.mutation.ItemsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: []string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := luo.mutation.ItemsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: location.ItemsTable, + Columns: []string{location.ItemsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: item.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &Location{config: luo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, luo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{location.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/ent/migrate/migrate.go b/backend/ent/migrate/migrate.go index 9bdaf52..1956a6b 100644 --- a/backend/ent/migrate/migrate.go +++ b/backend/ent/migrate/migrate.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package migrate @@ -28,9 +28,6 @@ var ( // and therefore, it's recommended to enable this option to get more // flexibility in the schema changes. WithDropIndex = schema.WithDropIndex - // WithFixture sets the foreign-key renaming option to the migration when upgrading - // ent from v0.1.0 (issue-#285). 
Defaults to false. - WithFixture = schema.WithFixture // WithForeignKeys enables creating foreign-key in schema DDL. This defaults to true. WithForeignKeys = schema.WithForeignKeys ) @@ -45,27 +42,23 @@ func NewSchema(drv dialect.Driver) *Schema { return &Schema{drv: drv} } // Create creates all schema resources. func (s *Schema) Create(ctx context.Context, opts ...schema.MigrateOption) error { + return Create(ctx, s, Tables, opts...) +} + +// Create creates all table resources using the given schema driver. +func Create(ctx context.Context, s *Schema, tables []*schema.Table, opts ...schema.MigrateOption) error { migrate, err := schema.NewMigrate(s.drv, opts...) if err != nil { return fmt.Errorf("ent/migrate: %w", err) } - return migrate.Create(ctx, Tables...) + return migrate.Create(ctx, tables...) } // WriteTo writes the schema changes to w instead of running them against the database. // -// if err := client.Schema.WriteTo(context.Background(), os.Stdout); err != nil { +// if err := client.Schema.WriteTo(context.Background(), os.Stdout); err != nil { // log.Fatal(err) -// } -// +// } func (s *Schema) WriteTo(ctx context.Context, w io.Writer, opts ...schema.MigrateOption) error { - drv := &schema.WriteDriver{ - Writer: w, - Driver: s.drv, - } - migrate, err := schema.NewMigrate(drv, opts...) - if err != nil { - return fmt.Errorf("ent/migrate: %w", err) - } - return migrate.Create(ctx, Tables...) + return Create(ctx, &Schema{drv: &schema.WriteDriver{Writer: w, Driver: s.drv}}, Tables, opts...) } diff --git a/backend/ent/migrate/schema.go b/backend/ent/migrate/schema.go index 203f01c..e6391db 100644 --- a/backend/ent/migrate/schema.go +++ b/backend/ent/migrate/schema.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package migrate @@ -10,10 +10,11 @@ import ( var ( // AuthTokensColumns holds the columns for the "auth_tokens" table. 
AuthTokensColumns = []*schema.Column{ - {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "id", Type: field.TypeUUID}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, {Name: "token", Type: field.TypeBytes, Unique: true}, {Name: "expires_at", Type: field.TypeTime}, - {Name: "created_at", Type: field.TypeTime}, {Name: "user_auth_tokens", Type: field.TypeUUID, Nullable: true}, } // AuthTokensTable holds the schema information for the "auth_tokens" table. @@ -24,7 +25,7 @@ var ( ForeignKeys: []*schema.ForeignKey{ { Symbol: "auth_tokens_users_auth_tokens", - Columns: []*schema.Column{AuthTokensColumns[4]}, + Columns: []*schema.Column{AuthTokensColumns[5]}, RefColumns: []*schema.Column{UsersColumns[0]}, OnDelete: schema.SetNull, }, @@ -33,31 +34,213 @@ var ( { Name: "authtokens_token", Unique: false, - Columns: []*schema.Column{AuthTokensColumns[1]}, + Columns: []*schema.Column{AuthTokensColumns[3]}, + }, + }, + } + // GroupsColumns holds the columns for the "groups" table. + GroupsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "name", Type: field.TypeString, Size: 255}, + {Name: "currency", Type: field.TypeEnum, Enums: []string{"usd"}, Default: "usd"}, + } + // GroupsTable holds the schema information for the "groups" table. + GroupsTable = &schema.Table{ + Name: "groups", + Columns: GroupsColumns, + PrimaryKey: []*schema.Column{GroupsColumns[0]}, + } + // ItemsColumns holds the columns for the "items" table. 
+ ItemsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "name", Type: field.TypeString, Size: 255}, + {Name: "description", Type: field.TypeString, Nullable: true, Size: 1000}, + {Name: "notes", Type: field.TypeString, Nullable: true, Size: 1000}, + {Name: "serial_number", Type: field.TypeString, Nullable: true, Size: 255}, + {Name: "model_number", Type: field.TypeString, Nullable: true, Size: 255}, + {Name: "manufacturer", Type: field.TypeString, Nullable: true, Size: 255}, + {Name: "purchase_time", Type: field.TypeTime, Nullable: true}, + {Name: "purchase_from", Type: field.TypeString, Nullable: true}, + {Name: "purchase_price", Type: field.TypeFloat64, Default: 0}, + {Name: "purchase_receipt_id", Type: field.TypeUUID, Nullable: true}, + {Name: "sold_time", Type: field.TypeTime, Nullable: true}, + {Name: "sold_to", Type: field.TypeString, Nullable: true}, + {Name: "sold_price", Type: field.TypeFloat64, Default: 0}, + {Name: "sold_receipt_id", Type: field.TypeUUID, Nullable: true}, + {Name: "sold_notes", Type: field.TypeString, Nullable: true, Size: 1000}, + {Name: "group_items", Type: field.TypeUUID}, + {Name: "location_items", Type: field.TypeUUID, Nullable: true}, + } + // ItemsTable holds the schema information for the "items" table. + ItemsTable = &schema.Table{ + Name: "items", + Columns: ItemsColumns, + PrimaryKey: []*schema.Column{ItemsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "items_groups_items", + Columns: []*schema.Column{ItemsColumns[18]}, + RefColumns: []*schema.Column{GroupsColumns[0]}, + OnDelete: schema.NoAction, + }, + { + Symbol: "items_locations_items", + Columns: []*schema.Column{ItemsColumns[19]}, + RefColumns: []*schema.Column{LocationsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, + } + // ItemFieldsColumns holds the columns for the "item_fields" table. 
+ ItemFieldsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "name", Type: field.TypeString, Size: 255}, + {Name: "description", Type: field.TypeString, Nullable: true, Size: 1000}, + {Name: "type", Type: field.TypeEnum, Enums: []string{"text", "number", "boolean", "time"}}, + {Name: "text_value", Type: field.TypeString, Nullable: true, Size: 500}, + {Name: "number_value", Type: field.TypeInt, Nullable: true}, + {Name: "boolean_value", Type: field.TypeBool, Default: false}, + {Name: "time_value", Type: field.TypeTime}, + {Name: "item_fields", Type: field.TypeUUID, Nullable: true}, + } + // ItemFieldsTable holds the schema information for the "item_fields" table. + ItemFieldsTable = &schema.Table{ + Name: "item_fields", + Columns: ItemFieldsColumns, + PrimaryKey: []*schema.Column{ItemFieldsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "item_fields_items_fields", + Columns: []*schema.Column{ItemFieldsColumns[10]}, + RefColumns: []*schema.Column{ItemsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, + } + // LabelsColumns holds the columns for the "labels" table. + LabelsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "name", Type: field.TypeString, Size: 255}, + {Name: "description", Type: field.TypeString, Nullable: true, Size: 1000}, + {Name: "color", Type: field.TypeString, Nullable: true, Size: 255}, + {Name: "group_labels", Type: field.TypeUUID}, + } + // LabelsTable holds the schema information for the "labels" table. 
+ LabelsTable = &schema.Table{ + Name: "labels", + Columns: LabelsColumns, + PrimaryKey: []*schema.Column{LabelsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "labels_groups_labels", + Columns: []*schema.Column{LabelsColumns[6]}, + RefColumns: []*schema.Column{GroupsColumns[0]}, + OnDelete: schema.NoAction, + }, + }, + } + // LocationsColumns holds the columns for the "locations" table. + LocationsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "name", Type: field.TypeString, Size: 255}, + {Name: "description", Type: field.TypeString, Nullable: true, Size: 1000}, + {Name: "group_locations", Type: field.TypeUUID}, + } + // LocationsTable holds the schema information for the "locations" table. + LocationsTable = &schema.Table{ + Name: "locations", + Columns: LocationsColumns, + PrimaryKey: []*schema.Column{LocationsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "locations_groups_locations", + Columns: []*schema.Column{LocationsColumns[5]}, + RefColumns: []*schema.Column{GroupsColumns[0]}, + OnDelete: schema.NoAction, }, }, } // UsersColumns holds the columns for the "users" table. UsersColumns = []*schema.Column{ {Name: "id", Type: field.TypeUUID}, - {Name: "name", Type: field.TypeString}, - {Name: "email", Type: field.TypeString, Unique: true}, - {Name: "password", Type: field.TypeString}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "name", Type: field.TypeString, Size: 255}, + {Name: "email", Type: field.TypeString, Unique: true, Size: 255}, + {Name: "password", Type: field.TypeString, Size: 255}, {Name: "is_superuser", Type: field.TypeBool, Default: false}, + {Name: "group_users", Type: field.TypeUUID}, } // UsersTable holds the schema information for the "users" table. 
UsersTable = &schema.Table{ Name: "users", Columns: UsersColumns, PrimaryKey: []*schema.Column{UsersColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "users_groups_users", + Columns: []*schema.Column{UsersColumns[7]}, + RefColumns: []*schema.Column{GroupsColumns[0]}, + OnDelete: schema.NoAction, + }, + }, + } + // LabelItemsColumns holds the columns for the "label_items" table. + LabelItemsColumns = []*schema.Column{ + {Name: "label_id", Type: field.TypeUUID}, + {Name: "item_id", Type: field.TypeUUID}, + } + // LabelItemsTable holds the schema information for the "label_items" table. + LabelItemsTable = &schema.Table{ + Name: "label_items", + Columns: LabelItemsColumns, + PrimaryKey: []*schema.Column{LabelItemsColumns[0], LabelItemsColumns[1]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "label_items_label_id", + Columns: []*schema.Column{LabelItemsColumns[0]}, + RefColumns: []*schema.Column{LabelsColumns[0]}, + OnDelete: schema.Cascade, + }, + { + Symbol: "label_items_item_id", + Columns: []*schema.Column{LabelItemsColumns[1]}, + RefColumns: []*schema.Column{ItemsColumns[0]}, + OnDelete: schema.Cascade, + }, + }, } // Tables holds all the tables in the schema. 
Tables = []*schema.Table{ AuthTokensTable, + GroupsTable, + ItemsTable, + ItemFieldsTable, + LabelsTable, + LocationsTable, UsersTable, + LabelItemsTable, } ) func init() { AuthTokensTable.ForeignKeys[0].RefTable = UsersTable + ItemsTable.ForeignKeys[0].RefTable = GroupsTable + ItemsTable.ForeignKeys[1].RefTable = LocationsTable + ItemFieldsTable.ForeignKeys[0].RefTable = ItemsTable + LabelsTable.ForeignKeys[0].RefTable = GroupsTable + LocationsTable.ForeignKeys[0].RefTable = GroupsTable + UsersTable.ForeignKeys[0].RefTable = GroupsTable + LabelItemsTable.ForeignKeys[0].RefTable = LabelsTable + LabelItemsTable.ForeignKeys[1].RefTable = ItemsTable } diff --git a/backend/ent/mutation.go b/backend/ent/mutation.go index 851990d..0f39d08 100644 --- a/backend/ent/mutation.go +++ b/backend/ent/mutation.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -11,6 +11,11 @@ import ( "github.com/google/uuid" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" "github.com/hay-kot/content/backend/ent/predicate" "github.com/hay-kot/content/backend/ent/user" @@ -27,6 +32,11 @@ const ( // Node types. 
TypeAuthTokens = "AuthTokens" + TypeGroup = "Group" + TypeItem = "Item" + TypeItemField = "ItemField" + TypeLabel = "Label" + TypeLocation = "Location" TypeUser = "User" ) @@ -35,10 +45,11 @@ type AuthTokensMutation struct { config op Op typ string - id *int + id *uuid.UUID + created_at *time.Time + updated_at *time.Time token *[]byte expires_at *time.Time - created_at *time.Time clearedFields map[string]struct{} user *uuid.UUID cleareduser bool @@ -67,7 +78,7 @@ func newAuthTokensMutation(c config, op Op, opts ...authtokensOption) *AuthToken } // withAuthTokensID sets the ID field of the mutation. -func withAuthTokensID(id int) authtokensOption { +func withAuthTokensID(id uuid.UUID) authtokensOption { return func(m *AuthTokensMutation) { var ( err error @@ -117,9 +128,15 @@ func (m AuthTokensMutation) Tx() (*Tx, error) { return tx, nil } +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of AuthTokens entities. +func (m *AuthTokensMutation) SetID(id uuid.UUID) { + m.id = &id +} + // ID returns the ID value in the mutation. Note that the ID is only available // if it was provided to the builder or after it was returned from the database. -func (m *AuthTokensMutation) ID() (id int, exists bool) { +func (m *AuthTokensMutation) ID() (id uuid.UUID, exists bool) { if m.id == nil { return } @@ -130,12 +147,12 @@ func (m *AuthTokensMutation) ID() (id int, exists bool) { // That means, if the mutation is applied within a transaction with an isolation level such // as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated // or updated by the mutation. 
-func (m *AuthTokensMutation) IDs(ctx context.Context) ([]int, error) { +func (m *AuthTokensMutation) IDs(ctx context.Context) ([]uuid.UUID, error) { switch { case m.op.Is(OpUpdateOne | OpDeleteOne): id, exists := m.ID() if exists { - return []int{id}, nil + return []uuid.UUID{id}, nil } fallthrough case m.op.Is(OpUpdate | OpDelete): @@ -145,6 +162,78 @@ func (m *AuthTokensMutation) IDs(ctx context.Context) ([]int, error) { } } +// SetCreatedAt sets the "created_at" field. +func (m *AuthTokensMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *AuthTokensMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *AuthTokensMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *AuthTokensMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUpdatedAt sets the "updated_at" field. +func (m *AuthTokensMutation) SetUpdatedAt(t time.Time) { + m.updated_at = &t +} + +// UpdatedAt returns the value of the "updated_at" field in the mutation. 
+func (m *AuthTokensMutation) UpdatedAt() (r time.Time, exists bool) { + v := m.updated_at + if v == nil { + return + } + return *v, true +} + +// OldUpdatedAt returns the old "updated_at" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *AuthTokensMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUpdatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) + } + return oldValue.UpdatedAt, nil +} + +// ResetUpdatedAt resets all changes to the "updated_at" field. +func (m *AuthTokensMutation) ResetUpdatedAt() { + m.updated_at = nil +} + // SetToken sets the "token" field. func (m *AuthTokensMutation) SetToken(b []byte) { m.token = &b @@ -217,42 +306,6 @@ func (m *AuthTokensMutation) ResetExpiresAt() { m.expires_at = nil } -// SetCreatedAt sets the "created_at" field. -func (m *AuthTokensMutation) SetCreatedAt(t time.Time) { - m.created_at = &t -} - -// CreatedAt returns the value of the "created_at" field in the mutation. -func (m *AuthTokensMutation) CreatedAt() (r time.Time, exists bool) { - v := m.created_at - if v == nil { - return - } - return *v, true -} - -// OldCreatedAt returns the old "created_at" field's value of the AuthTokens entity. -// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. -// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
-func (m *AuthTokensMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { - if !m.op.Is(OpUpdateOne) { - return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") - } - if m.id == nil || m.oldValue == nil { - return v, errors.New("OldCreatedAt requires an ID field in the mutation") - } - oldValue, err := m.oldValue(ctx) - if err != nil { - return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) - } - return oldValue.CreatedAt, nil -} - -// ResetCreatedAt resets all changes to the "created_at" field. -func (m *AuthTokensMutation) ResetCreatedAt() { - m.created_at = nil -} - // SetUserID sets the "user" edge to the User entity by id. func (m *AuthTokensMutation) SetUserID(id uuid.UUID) { m.user = &id @@ -311,16 +364,19 @@ func (m *AuthTokensMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *AuthTokensMutation) Fields() []string { - fields := make([]string, 0, 3) + fields := make([]string, 0, 4) + if m.created_at != nil { + fields = append(fields, authtokens.FieldCreatedAt) + } + if m.updated_at != nil { + fields = append(fields, authtokens.FieldUpdatedAt) + } if m.token != nil { fields = append(fields, authtokens.FieldToken) } if m.expires_at != nil { fields = append(fields, authtokens.FieldExpiresAt) } - if m.created_at != nil { - fields = append(fields, authtokens.FieldCreatedAt) - } return fields } @@ -329,12 +385,14 @@ func (m *AuthTokensMutation) Fields() []string { // schema. 
func (m *AuthTokensMutation) Field(name string) (ent.Value, bool) { switch name { + case authtokens.FieldCreatedAt: + return m.CreatedAt() + case authtokens.FieldUpdatedAt: + return m.UpdatedAt() case authtokens.FieldToken: return m.Token() case authtokens.FieldExpiresAt: return m.ExpiresAt() - case authtokens.FieldCreatedAt: - return m.CreatedAt() } return nil, false } @@ -344,12 +402,14 @@ func (m *AuthTokensMutation) Field(name string) (ent.Value, bool) { // database failed. func (m *AuthTokensMutation) OldField(ctx context.Context, name string) (ent.Value, error) { switch name { + case authtokens.FieldCreatedAt: + return m.OldCreatedAt(ctx) + case authtokens.FieldUpdatedAt: + return m.OldUpdatedAt(ctx) case authtokens.FieldToken: return m.OldToken(ctx) case authtokens.FieldExpiresAt: return m.OldExpiresAt(ctx) - case authtokens.FieldCreatedAt: - return m.OldCreatedAt(ctx) } return nil, fmt.Errorf("unknown AuthTokens field %s", name) } @@ -359,6 +419,20 @@ func (m *AuthTokensMutation) OldField(ctx context.Context, name string) (ent.Val // type. 
func (m *AuthTokensMutation) SetField(name string, value ent.Value) error { switch name { + case authtokens.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case authtokens.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil case authtokens.FieldToken: v, ok := value.([]byte) if !ok { @@ -373,13 +447,6 @@ func (m *AuthTokensMutation) SetField(name string, value ent.Value) error { } m.SetExpiresAt(v) return nil - case authtokens.FieldCreatedAt: - v, ok := value.(time.Time) - if !ok { - return fmt.Errorf("unexpected type %T for field %s", value, name) - } - m.SetCreatedAt(v) - return nil } return fmt.Errorf("unknown AuthTokens field %s", name) } @@ -429,15 +496,18 @@ func (m *AuthTokensMutation) ClearField(name string) error { // It returns an error if the field is not defined in the schema. func (m *AuthTokensMutation) ResetField(name string) error { switch name { + case authtokens.FieldCreatedAt: + m.ResetCreatedAt() + return nil + case authtokens.FieldUpdatedAt: + m.ResetUpdatedAt() + return nil case authtokens.FieldToken: m.ResetToken() return nil case authtokens.FieldExpiresAt: m.ResetExpiresAt() return nil - case authtokens.FieldCreatedAt: - m.ResetCreatedAt() - return nil } return fmt.Errorf("unknown AuthTokens field %s", name) } @@ -518,19 +588,4913 @@ func (m *AuthTokensMutation) ResetEdge(name string) error { return fmt.Errorf("unknown AuthTokens edge %s", name) } +// GroupMutation represents an operation that mutates the Group nodes in the graph. 
+type GroupMutation struct { + config + op Op + typ string + id *uuid.UUID + created_at *time.Time + updated_at *time.Time + name *string + currency *group.Currency + clearedFields map[string]struct{} + users map[uuid.UUID]struct{} + removedusers map[uuid.UUID]struct{} + clearedusers bool + locations map[uuid.UUID]struct{} + removedlocations map[uuid.UUID]struct{} + clearedlocations bool + items map[uuid.UUID]struct{} + removeditems map[uuid.UUID]struct{} + cleareditems bool + labels map[uuid.UUID]struct{} + removedlabels map[uuid.UUID]struct{} + clearedlabels bool + done bool + oldValue func(context.Context) (*Group, error) + predicates []predicate.Group +} + +var _ ent.Mutation = (*GroupMutation)(nil) + +// groupOption allows management of the mutation configuration using functional options. +type groupOption func(*GroupMutation) + +// newGroupMutation creates new mutation for the Group entity. +func newGroupMutation(c config, op Op, opts ...groupOption) *GroupMutation { + m := &GroupMutation{ + config: c, + op: op, + typ: TypeGroup, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withGroupID sets the ID field of the mutation. +func withGroupID(id uuid.UUID) groupOption { + return func(m *GroupMutation) { + var ( + err error + once sync.Once + value *Group + ) + m.oldValue = func(ctx context.Context) (*Group, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().Group.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withGroup sets the old Group of the mutation. +func withGroup(node *Group) groupOption { + return func(m *GroupMutation) { + m.oldValue = func(context.Context) (*Group, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. 
If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m GroupMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m GroupMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of Group entities. +func (m *GroupMutation) SetID(id uuid.UUID) { + m.id = &id +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *GroupMutation) ID() (id uuid.UUID, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. +func (m *GroupMutation) IDs(ctx context.Context) ([]uuid.UUID, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []uuid.UUID{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().Group.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetCreatedAt sets the "created_at" field. +func (m *GroupMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. 
+func (m *GroupMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *GroupMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUpdatedAt sets the "updated_at" field. +func (m *GroupMutation) SetUpdatedAt(t time.Time) { + m.updated_at = &t +} + +// UpdatedAt returns the value of the "updated_at" field in the mutation. +func (m *GroupMutation) UpdatedAt() (r time.Time, exists bool) { + v := m.updated_at + if v == nil { + return + } + return *v, true +} + +// OldUpdatedAt returns the old "updated_at" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *GroupMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUpdatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) + } + return oldValue.UpdatedAt, nil +} + +// ResetUpdatedAt resets all changes to the "updated_at" field. +func (m *GroupMutation) ResetUpdatedAt() { + m.updated_at = nil +} + +// SetName sets the "name" field. +func (m *GroupMutation) SetName(s string) { + m.name = &s +} + +// Name returns the value of the "name" field in the mutation. +func (m *GroupMutation) Name() (r string, exists bool) { + v := m.name + if v == nil { + return + } + return *v, true +} + +// OldName returns the old "name" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldName(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldName is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldName requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldName: %w", err) + } + return oldValue.Name, nil +} + +// ResetName resets all changes to the "name" field. +func (m *GroupMutation) ResetName() { + m.name = nil +} + +// SetCurrency sets the "currency" field. +func (m *GroupMutation) SetCurrency(gr group.Currency) { + m.currency = &gr +} + +// Currency returns the value of the "currency" field in the mutation. 
+func (m *GroupMutation) Currency() (r group.Currency, exists bool) { + v := m.currency + if v == nil { + return + } + return *v, true +} + +// OldCurrency returns the old "currency" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldCurrency(ctx context.Context) (v group.Currency, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCurrency is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCurrency requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCurrency: %w", err) + } + return oldValue.Currency, nil +} + +// ResetCurrency resets all changes to the "currency" field. +func (m *GroupMutation) ResetCurrency() { + m.currency = nil +} + +// AddUserIDs adds the "users" edge to the User entity by ids. +func (m *GroupMutation) AddUserIDs(ids ...uuid.UUID) { + if m.users == nil { + m.users = make(map[uuid.UUID]struct{}) + } + for i := range ids { + m.users[ids[i]] = struct{}{} + } +} + +// ClearUsers clears the "users" edge to the User entity. +func (m *GroupMutation) ClearUsers() { + m.clearedusers = true +} + +// UsersCleared reports if the "users" edge to the User entity was cleared. +func (m *GroupMutation) UsersCleared() bool { + return m.clearedusers +} + +// RemoveUserIDs removes the "users" edge to the User entity by IDs. +func (m *GroupMutation) RemoveUserIDs(ids ...uuid.UUID) { + if m.removedusers == nil { + m.removedusers = make(map[uuid.UUID]struct{}) + } + for i := range ids { + delete(m.users, ids[i]) + m.removedusers[ids[i]] = struct{}{} + } +} + +// RemovedUsers returns the removed IDs of the "users" edge to the User entity. 
+func (m *GroupMutation) RemovedUsersIDs() (ids []uuid.UUID) { + for id := range m.removedusers { + ids = append(ids, id) + } + return +} + +// UsersIDs returns the "users" edge IDs in the mutation. +func (m *GroupMutation) UsersIDs() (ids []uuid.UUID) { + for id := range m.users { + ids = append(ids, id) + } + return +} + +// ResetUsers resets all changes to the "users" edge. +func (m *GroupMutation) ResetUsers() { + m.users = nil + m.clearedusers = false + m.removedusers = nil +} + +// AddLocationIDs adds the "locations" edge to the Location entity by ids. +func (m *GroupMutation) AddLocationIDs(ids ...uuid.UUID) { + if m.locations == nil { + m.locations = make(map[uuid.UUID]struct{}) + } + for i := range ids { + m.locations[ids[i]] = struct{}{} + } +} + +// ClearLocations clears the "locations" edge to the Location entity. +func (m *GroupMutation) ClearLocations() { + m.clearedlocations = true +} + +// LocationsCleared reports if the "locations" edge to the Location entity was cleared. +func (m *GroupMutation) LocationsCleared() bool { + return m.clearedlocations +} + +// RemoveLocationIDs removes the "locations" edge to the Location entity by IDs. +func (m *GroupMutation) RemoveLocationIDs(ids ...uuid.UUID) { + if m.removedlocations == nil { + m.removedlocations = make(map[uuid.UUID]struct{}) + } + for i := range ids { + delete(m.locations, ids[i]) + m.removedlocations[ids[i]] = struct{}{} + } +} + +// RemovedLocations returns the removed IDs of the "locations" edge to the Location entity. +func (m *GroupMutation) RemovedLocationsIDs() (ids []uuid.UUID) { + for id := range m.removedlocations { + ids = append(ids, id) + } + return +} + +// LocationsIDs returns the "locations" edge IDs in the mutation. +func (m *GroupMutation) LocationsIDs() (ids []uuid.UUID) { + for id := range m.locations { + ids = append(ids, id) + } + return +} + +// ResetLocations resets all changes to the "locations" edge. 
+func (m *GroupMutation) ResetLocations() { + m.locations = nil + m.clearedlocations = false + m.removedlocations = nil +} + +// AddItemIDs adds the "items" edge to the Item entity by ids. +func (m *GroupMutation) AddItemIDs(ids ...uuid.UUID) { + if m.items == nil { + m.items = make(map[uuid.UUID]struct{}) + } + for i := range ids { + m.items[ids[i]] = struct{}{} + } +} + +// ClearItems clears the "items" edge to the Item entity. +func (m *GroupMutation) ClearItems() { + m.cleareditems = true +} + +// ItemsCleared reports if the "items" edge to the Item entity was cleared. +func (m *GroupMutation) ItemsCleared() bool { + return m.cleareditems +} + +// RemoveItemIDs removes the "items" edge to the Item entity by IDs. +func (m *GroupMutation) RemoveItemIDs(ids ...uuid.UUID) { + if m.removeditems == nil { + m.removeditems = make(map[uuid.UUID]struct{}) + } + for i := range ids { + delete(m.items, ids[i]) + m.removeditems[ids[i]] = struct{}{} + } +} + +// RemovedItems returns the removed IDs of the "items" edge to the Item entity. +func (m *GroupMutation) RemovedItemsIDs() (ids []uuid.UUID) { + for id := range m.removeditems { + ids = append(ids, id) + } + return +} + +// ItemsIDs returns the "items" edge IDs in the mutation. +func (m *GroupMutation) ItemsIDs() (ids []uuid.UUID) { + for id := range m.items { + ids = append(ids, id) + } + return +} + +// ResetItems resets all changes to the "items" edge. +func (m *GroupMutation) ResetItems() { + m.items = nil + m.cleareditems = false + m.removeditems = nil +} + +// AddLabelIDs adds the "labels" edge to the Label entity by ids. +func (m *GroupMutation) AddLabelIDs(ids ...uuid.UUID) { + if m.labels == nil { + m.labels = make(map[uuid.UUID]struct{}) + } + for i := range ids { + m.labels[ids[i]] = struct{}{} + } +} + +// ClearLabels clears the "labels" edge to the Label entity. 
+func (m *GroupMutation) ClearLabels() { + m.clearedlabels = true +} + +// LabelsCleared reports if the "labels" edge to the Label entity was cleared. +func (m *GroupMutation) LabelsCleared() bool { + return m.clearedlabels +} + +// RemoveLabelIDs removes the "labels" edge to the Label entity by IDs. +func (m *GroupMutation) RemoveLabelIDs(ids ...uuid.UUID) { + if m.removedlabels == nil { + m.removedlabels = make(map[uuid.UUID]struct{}) + } + for i := range ids { + delete(m.labels, ids[i]) + m.removedlabels[ids[i]] = struct{}{} + } +} + +// RemovedLabels returns the removed IDs of the "labels" edge to the Label entity. +func (m *GroupMutation) RemovedLabelsIDs() (ids []uuid.UUID) { + for id := range m.removedlabels { + ids = append(ids, id) + } + return +} + +// LabelsIDs returns the "labels" edge IDs in the mutation. +func (m *GroupMutation) LabelsIDs() (ids []uuid.UUID) { + for id := range m.labels { + ids = append(ids, id) + } + return +} + +// ResetLabels resets all changes to the "labels" edge. +func (m *GroupMutation) ResetLabels() { + m.labels = nil + m.clearedlabels = false + m.removedlabels = nil +} + +// Where appends a list predicates to the GroupMutation builder. +func (m *GroupMutation) Where(ps ...predicate.Group) { + m.predicates = append(m.predicates, ps...) +} + +// Op returns the operation name. +func (m *GroupMutation) Op() Op { + return m.op +} + +// Type returns the node type of this mutation (Group). +func (m *GroupMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). 
+func (m *GroupMutation) Fields() []string { + fields := make([]string, 0, 4) + if m.created_at != nil { + fields = append(fields, group.FieldCreatedAt) + } + if m.updated_at != nil { + fields = append(fields, group.FieldUpdatedAt) + } + if m.name != nil { + fields = append(fields, group.FieldName) + } + if m.currency != nil { + fields = append(fields, group.FieldCurrency) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *GroupMutation) Field(name string) (ent.Value, bool) { + switch name { + case group.FieldCreatedAt: + return m.CreatedAt() + case group.FieldUpdatedAt: + return m.UpdatedAt() + case group.FieldName: + return m.Name() + case group.FieldCurrency: + return m.Currency() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *GroupMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case group.FieldCreatedAt: + return m.OldCreatedAt(ctx) + case group.FieldUpdatedAt: + return m.OldUpdatedAt(ctx) + case group.FieldName: + return m.OldName(ctx) + case group.FieldCurrency: + return m.OldCurrency(ctx) + } + return nil, fmt.Errorf("unknown Group field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *GroupMutation) SetField(name string, value ent.Value) error { + switch name { + case group.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case group.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil + case group.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case group.FieldCurrency: + v, ok := value.(group.Currency) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCurrency(v) + return nil + } + return fmt.Errorf("unknown Group field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *GroupMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *GroupMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *GroupMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown Group numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *GroupMutation) ClearedFields() []string { + return nil +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. 
+func (m *GroupMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *GroupMutation) ClearField(name string) error { + return fmt.Errorf("unknown Group nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *GroupMutation) ResetField(name string) error { + switch name { + case group.FieldCreatedAt: + m.ResetCreatedAt() + return nil + case group.FieldUpdatedAt: + m.ResetUpdatedAt() + return nil + case group.FieldName: + m.ResetName() + return nil + case group.FieldCurrency: + m.ResetCurrency() + return nil + } + return fmt.Errorf("unknown Group field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *GroupMutation) AddedEdges() []string { + edges := make([]string, 0, 4) + if m.users != nil { + edges = append(edges, group.EdgeUsers) + } + if m.locations != nil { + edges = append(edges, group.EdgeLocations) + } + if m.items != nil { + edges = append(edges, group.EdgeItems) + } + if m.labels != nil { + edges = append(edges, group.EdgeLabels) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. 
+func (m *GroupMutation) AddedIDs(name string) []ent.Value { + switch name { + case group.EdgeUsers: + ids := make([]ent.Value, 0, len(m.users)) + for id := range m.users { + ids = append(ids, id) + } + return ids + case group.EdgeLocations: + ids := make([]ent.Value, 0, len(m.locations)) + for id := range m.locations { + ids = append(ids, id) + } + return ids + case group.EdgeItems: + ids := make([]ent.Value, 0, len(m.items)) + for id := range m.items { + ids = append(ids, id) + } + return ids + case group.EdgeLabels: + ids := make([]ent.Value, 0, len(m.labels)) + for id := range m.labels { + ids = append(ids, id) + } + return ids + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *GroupMutation) RemovedEdges() []string { + edges := make([]string, 0, 4) + if m.removedusers != nil { + edges = append(edges, group.EdgeUsers) + } + if m.removedlocations != nil { + edges = append(edges, group.EdgeLocations) + } + if m.removeditems != nil { + edges = append(edges, group.EdgeItems) + } + if m.removedlabels != nil { + edges = append(edges, group.EdgeLabels) + } + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. 
+func (m *GroupMutation) RemovedIDs(name string) []ent.Value { + switch name { + case group.EdgeUsers: + ids := make([]ent.Value, 0, len(m.removedusers)) + for id := range m.removedusers { + ids = append(ids, id) + } + return ids + case group.EdgeLocations: + ids := make([]ent.Value, 0, len(m.removedlocations)) + for id := range m.removedlocations { + ids = append(ids, id) + } + return ids + case group.EdgeItems: + ids := make([]ent.Value, 0, len(m.removeditems)) + for id := range m.removeditems { + ids = append(ids, id) + } + return ids + case group.EdgeLabels: + ids := make([]ent.Value, 0, len(m.removedlabels)) + for id := range m.removedlabels { + ids = append(ids, id) + } + return ids + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *GroupMutation) ClearedEdges() []string { + edges := make([]string, 0, 4) + if m.clearedusers { + edges = append(edges, group.EdgeUsers) + } + if m.clearedlocations { + edges = append(edges, group.EdgeLocations) + } + if m.cleareditems { + edges = append(edges, group.EdgeItems) + } + if m.clearedlabels { + edges = append(edges, group.EdgeLabels) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *GroupMutation) EdgeCleared(name string) bool { + switch name { + case group.EdgeUsers: + return m.clearedusers + case group.EdgeLocations: + return m.clearedlocations + case group.EdgeItems: + return m.cleareditems + case group.EdgeLabels: + return m.clearedlabels + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *GroupMutation) ClearEdge(name string) error { + switch name { + } + return fmt.Errorf("unknown Group unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. 
+// It returns an error if the edge is not defined in the schema. +func (m *GroupMutation) ResetEdge(name string) error { + switch name { + case group.EdgeUsers: + m.ResetUsers() + return nil + case group.EdgeLocations: + m.ResetLocations() + return nil + case group.EdgeItems: + m.ResetItems() + return nil + case group.EdgeLabels: + m.ResetLabels() + return nil + } + return fmt.Errorf("unknown Group edge %s", name) +} + +// ItemMutation represents an operation that mutates the Item nodes in the graph. +type ItemMutation struct { + config + op Op + typ string + id *uuid.UUID + created_at *time.Time + updated_at *time.Time + name *string + description *string + notes *string + serial_number *string + model_number *string + manufacturer *string + purchase_time *time.Time + purchase_from *string + purchase_price *float64 + addpurchase_price *float64 + purchase_receipt_id *uuid.UUID + sold_time *time.Time + sold_to *string + sold_price *float64 + addsold_price *float64 + sold_receipt_id *uuid.UUID + sold_notes *string + clearedFields map[string]struct{} + group *uuid.UUID + clearedgroup bool + location *uuid.UUID + clearedlocation bool + fields map[uuid.UUID]struct{} + removedfields map[uuid.UUID]struct{} + clearedfields bool + label map[uuid.UUID]struct{} + removedlabel map[uuid.UUID]struct{} + clearedlabel bool + done bool + oldValue func(context.Context) (*Item, error) + predicates []predicate.Item +} + +var _ ent.Mutation = (*ItemMutation)(nil) + +// itemOption allows management of the mutation configuration using functional options. +type itemOption func(*ItemMutation) + +// newItemMutation creates new mutation for the Item entity. +func newItemMutation(c config, op Op, opts ...itemOption) *ItemMutation { + m := &ItemMutation{ + config: c, + op: op, + typ: TypeItem, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withItemID sets the ID field of the mutation. 
+func withItemID(id uuid.UUID) itemOption { + return func(m *ItemMutation) { + var ( + err error + once sync.Once + value *Item + ) + m.oldValue = func(ctx context.Context) (*Item, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().Item.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withItem sets the old Item of the mutation. +func withItem(node *Item) itemOption { + return func(m *ItemMutation) { + m.oldValue = func(context.Context) (*Item, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m ItemMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m ItemMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of Item entities. +func (m *ItemMutation) SetID(id uuid.UUID) { + m.id = &id +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *ItemMutation) ID() (id uuid.UUID, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. 
+// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. +func (m *ItemMutation) IDs(ctx context.Context) ([]uuid.UUID, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []uuid.UUID{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().Item.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetCreatedAt sets the "created_at" field. +func (m *ItemMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *ItemMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *ItemMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUpdatedAt sets the "updated_at" field. 
+func (m *ItemMutation) SetUpdatedAt(t time.Time) { + m.updated_at = &t +} + +// UpdatedAt returns the value of the "updated_at" field in the mutation. +func (m *ItemMutation) UpdatedAt() (r time.Time, exists bool) { + v := m.updated_at + if v == nil { + return + } + return *v, true +} + +// OldUpdatedAt returns the old "updated_at" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUpdatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) + } + return oldValue.UpdatedAt, nil +} + +// ResetUpdatedAt resets all changes to the "updated_at" field. +func (m *ItemMutation) ResetUpdatedAt() { + m.updated_at = nil +} + +// SetName sets the "name" field. +func (m *ItemMutation) SetName(s string) { + m.name = &s +} + +// Name returns the value of the "name" field in the mutation. +func (m *ItemMutation) Name() (r string, exists bool) { + v := m.name + if v == nil { + return + } + return *v, true +} + +// OldName returns the old "name" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldName(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldName is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldName requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldName: %w", err) + } + return oldValue.Name, nil +} + +// ResetName resets all changes to the "name" field. +func (m *ItemMutation) ResetName() { + m.name = nil +} + +// SetDescription sets the "description" field. +func (m *ItemMutation) SetDescription(s string) { + m.description = &s +} + +// Description returns the value of the "description" field in the mutation. +func (m *ItemMutation) Description() (r string, exists bool) { + v := m.description + if v == nil { + return + } + return *v, true +} + +// OldDescription returns the old "description" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldDescription(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldDescription is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldDescription requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldDescription: %w", err) + } + return oldValue.Description, nil +} + +// ClearDescription clears the value of the "description" field. +func (m *ItemMutation) ClearDescription() { + m.description = nil + m.clearedFields[item.FieldDescription] = struct{}{} +} + +// DescriptionCleared returns if the "description" field was cleared in this mutation. 
+func (m *ItemMutation) DescriptionCleared() bool { + _, ok := m.clearedFields[item.FieldDescription] + return ok +} + +// ResetDescription resets all changes to the "description" field. +func (m *ItemMutation) ResetDescription() { + m.description = nil + delete(m.clearedFields, item.FieldDescription) +} + +// SetNotes sets the "notes" field. +func (m *ItemMutation) SetNotes(s string) { + m.notes = &s +} + +// Notes returns the value of the "notes" field in the mutation. +func (m *ItemMutation) Notes() (r string, exists bool) { + v := m.notes + if v == nil { + return + } + return *v, true +} + +// OldNotes returns the old "notes" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldNotes(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldNotes is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldNotes requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldNotes: %w", err) + } + return oldValue.Notes, nil +} + +// ClearNotes clears the value of the "notes" field. +func (m *ItemMutation) ClearNotes() { + m.notes = nil + m.clearedFields[item.FieldNotes] = struct{}{} +} + +// NotesCleared returns if the "notes" field was cleared in this mutation. +func (m *ItemMutation) NotesCleared() bool { + _, ok := m.clearedFields[item.FieldNotes] + return ok +} + +// ResetNotes resets all changes to the "notes" field. +func (m *ItemMutation) ResetNotes() { + m.notes = nil + delete(m.clearedFields, item.FieldNotes) +} + +// SetSerialNumber sets the "serial_number" field. 
+func (m *ItemMutation) SetSerialNumber(s string) { + m.serial_number = &s +} + +// SerialNumber returns the value of the "serial_number" field in the mutation. +func (m *ItemMutation) SerialNumber() (r string, exists bool) { + v := m.serial_number + if v == nil { + return + } + return *v, true +} + +// OldSerialNumber returns the old "serial_number" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldSerialNumber(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldSerialNumber is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldSerialNumber requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldSerialNumber: %w", err) + } + return oldValue.SerialNumber, nil +} + +// ClearSerialNumber clears the value of the "serial_number" field. +func (m *ItemMutation) ClearSerialNumber() { + m.serial_number = nil + m.clearedFields[item.FieldSerialNumber] = struct{}{} +} + +// SerialNumberCleared returns if the "serial_number" field was cleared in this mutation. +func (m *ItemMutation) SerialNumberCleared() bool { + _, ok := m.clearedFields[item.FieldSerialNumber] + return ok +} + +// ResetSerialNumber resets all changes to the "serial_number" field. +func (m *ItemMutation) ResetSerialNumber() { + m.serial_number = nil + delete(m.clearedFields, item.FieldSerialNumber) +} + +// SetModelNumber sets the "model_number" field. +func (m *ItemMutation) SetModelNumber(s string) { + m.model_number = &s +} + +// ModelNumber returns the value of the "model_number" field in the mutation. 
+func (m *ItemMutation) ModelNumber() (r string, exists bool) { + v := m.model_number + if v == nil { + return + } + return *v, true +} + +// OldModelNumber returns the old "model_number" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldModelNumber(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldModelNumber is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldModelNumber requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldModelNumber: %w", err) + } + return oldValue.ModelNumber, nil +} + +// ClearModelNumber clears the value of the "model_number" field. +func (m *ItemMutation) ClearModelNumber() { + m.model_number = nil + m.clearedFields[item.FieldModelNumber] = struct{}{} +} + +// ModelNumberCleared returns if the "model_number" field was cleared in this mutation. +func (m *ItemMutation) ModelNumberCleared() bool { + _, ok := m.clearedFields[item.FieldModelNumber] + return ok +} + +// ResetModelNumber resets all changes to the "model_number" field. +func (m *ItemMutation) ResetModelNumber() { + m.model_number = nil + delete(m.clearedFields, item.FieldModelNumber) +} + +// SetManufacturer sets the "manufacturer" field. +func (m *ItemMutation) SetManufacturer(s string) { + m.manufacturer = &s +} + +// Manufacturer returns the value of the "manufacturer" field in the mutation. +func (m *ItemMutation) Manufacturer() (r string, exists bool) { + v := m.manufacturer + if v == nil { + return + } + return *v, true +} + +// OldManufacturer returns the old "manufacturer" field's value of the Item entity. 
+// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemMutation) OldManufacturer(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldManufacturer is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldManufacturer requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldManufacturer: %w", err) + } + return oldValue.Manufacturer, nil +} + +// ClearManufacturer clears the value of the "manufacturer" field. +func (m *ItemMutation) ClearManufacturer() { + m.manufacturer = nil + m.clearedFields[item.FieldManufacturer] = struct{}{} +} + +// ManufacturerCleared returns if the "manufacturer" field was cleared in this mutation. +func (m *ItemMutation) ManufacturerCleared() bool { + _, ok := m.clearedFields[item.FieldManufacturer] + return ok +} + +// ResetManufacturer resets all changes to the "manufacturer" field. +func (m *ItemMutation) ResetManufacturer() { + m.manufacturer = nil + delete(m.clearedFields, item.FieldManufacturer) +} + +// SetPurchaseTime sets the "purchase_time" field. +func (m *ItemMutation) SetPurchaseTime(t time.Time) { + m.purchase_time = &t +} + +// PurchaseTime returns the value of the "purchase_time" field in the mutation. +func (m *ItemMutation) PurchaseTime() (r time.Time, exists bool) { + v := m.purchase_time + if v == nil { + return + } + return *v, true +} + +// OldPurchaseTime returns the old "purchase_time" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldPurchaseTime(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPurchaseTime is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPurchaseTime requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPurchaseTime: %w", err) + } + return oldValue.PurchaseTime, nil +} + +// ClearPurchaseTime clears the value of the "purchase_time" field. +func (m *ItemMutation) ClearPurchaseTime() { + m.purchase_time = nil + m.clearedFields[item.FieldPurchaseTime] = struct{}{} +} + +// PurchaseTimeCleared returns if the "purchase_time" field was cleared in this mutation. +func (m *ItemMutation) PurchaseTimeCleared() bool { + _, ok := m.clearedFields[item.FieldPurchaseTime] + return ok +} + +// ResetPurchaseTime resets all changes to the "purchase_time" field. +func (m *ItemMutation) ResetPurchaseTime() { + m.purchase_time = nil + delete(m.clearedFields, item.FieldPurchaseTime) +} + +// SetPurchaseFrom sets the "purchase_from" field. +func (m *ItemMutation) SetPurchaseFrom(s string) { + m.purchase_from = &s +} + +// PurchaseFrom returns the value of the "purchase_from" field in the mutation. +func (m *ItemMutation) PurchaseFrom() (r string, exists bool) { + v := m.purchase_from + if v == nil { + return + } + return *v, true +} + +// OldPurchaseFrom returns the old "purchase_from" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldPurchaseFrom(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPurchaseFrom is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPurchaseFrom requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPurchaseFrom: %w", err) + } + return oldValue.PurchaseFrom, nil +} + +// ClearPurchaseFrom clears the value of the "purchase_from" field. +func (m *ItemMutation) ClearPurchaseFrom() { + m.purchase_from = nil + m.clearedFields[item.FieldPurchaseFrom] = struct{}{} +} + +// PurchaseFromCleared returns if the "purchase_from" field was cleared in this mutation. +func (m *ItemMutation) PurchaseFromCleared() bool { + _, ok := m.clearedFields[item.FieldPurchaseFrom] + return ok +} + +// ResetPurchaseFrom resets all changes to the "purchase_from" field. +func (m *ItemMutation) ResetPurchaseFrom() { + m.purchase_from = nil + delete(m.clearedFields, item.FieldPurchaseFrom) +} + +// SetPurchasePrice sets the "purchase_price" field. +func (m *ItemMutation) SetPurchasePrice(f float64) { + m.purchase_price = &f + m.addpurchase_price = nil +} + +// PurchasePrice returns the value of the "purchase_price" field in the mutation. +func (m *ItemMutation) PurchasePrice() (r float64, exists bool) { + v := m.purchase_price + if v == nil { + return + } + return *v, true +} + +// OldPurchasePrice returns the old "purchase_price" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldPurchasePrice(ctx context.Context) (v float64, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPurchasePrice is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPurchasePrice requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPurchasePrice: %w", err) + } + return oldValue.PurchasePrice, nil +} + +// AddPurchasePrice adds f to the "purchase_price" field. +func (m *ItemMutation) AddPurchasePrice(f float64) { + if m.addpurchase_price != nil { + *m.addpurchase_price += f + } else { + m.addpurchase_price = &f + } +} + +// AddedPurchasePrice returns the value that was added to the "purchase_price" field in this mutation. +func (m *ItemMutation) AddedPurchasePrice() (r float64, exists bool) { + v := m.addpurchase_price + if v == nil { + return + } + return *v, true +} + +// ResetPurchasePrice resets all changes to the "purchase_price" field. +func (m *ItemMutation) ResetPurchasePrice() { + m.purchase_price = nil + m.addpurchase_price = nil +} + +// SetPurchaseReceiptID sets the "purchase_receipt_id" field. +func (m *ItemMutation) SetPurchaseReceiptID(u uuid.UUID) { + m.purchase_receipt_id = &u +} + +// PurchaseReceiptID returns the value of the "purchase_receipt_id" field in the mutation. +func (m *ItemMutation) PurchaseReceiptID() (r uuid.UUID, exists bool) { + v := m.purchase_receipt_id + if v == nil { + return + } + return *v, true +} + +// OldPurchaseReceiptID returns the old "purchase_receipt_id" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldPurchaseReceiptID(ctx context.Context) (v uuid.UUID, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPurchaseReceiptID is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPurchaseReceiptID requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPurchaseReceiptID: %w", err) + } + return oldValue.PurchaseReceiptID, nil +} + +// ClearPurchaseReceiptID clears the value of the "purchase_receipt_id" field. +func (m *ItemMutation) ClearPurchaseReceiptID() { + m.purchase_receipt_id = nil + m.clearedFields[item.FieldPurchaseReceiptID] = struct{}{} +} + +// PurchaseReceiptIDCleared returns if the "purchase_receipt_id" field was cleared in this mutation. +func (m *ItemMutation) PurchaseReceiptIDCleared() bool { + _, ok := m.clearedFields[item.FieldPurchaseReceiptID] + return ok +} + +// ResetPurchaseReceiptID resets all changes to the "purchase_receipt_id" field. +func (m *ItemMutation) ResetPurchaseReceiptID() { + m.purchase_receipt_id = nil + delete(m.clearedFields, item.FieldPurchaseReceiptID) +} + +// SetSoldTime sets the "sold_time" field. +func (m *ItemMutation) SetSoldTime(t time.Time) { + m.sold_time = &t +} + +// SoldTime returns the value of the "sold_time" field in the mutation. +func (m *ItemMutation) SoldTime() (r time.Time, exists bool) { + v := m.sold_time + if v == nil { + return + } + return *v, true +} + +// OldSoldTime returns the old "sold_time" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldSoldTime(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldSoldTime is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldSoldTime requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldSoldTime: %w", err) + } + return oldValue.SoldTime, nil +} + +// ClearSoldTime clears the value of the "sold_time" field. +func (m *ItemMutation) ClearSoldTime() { + m.sold_time = nil + m.clearedFields[item.FieldSoldTime] = struct{}{} +} + +// SoldTimeCleared returns if the "sold_time" field was cleared in this mutation. +func (m *ItemMutation) SoldTimeCleared() bool { + _, ok := m.clearedFields[item.FieldSoldTime] + return ok +} + +// ResetSoldTime resets all changes to the "sold_time" field. +func (m *ItemMutation) ResetSoldTime() { + m.sold_time = nil + delete(m.clearedFields, item.FieldSoldTime) +} + +// SetSoldTo sets the "sold_to" field. +func (m *ItemMutation) SetSoldTo(s string) { + m.sold_to = &s +} + +// SoldTo returns the value of the "sold_to" field in the mutation. +func (m *ItemMutation) SoldTo() (r string, exists bool) { + v := m.sold_to + if v == nil { + return + } + return *v, true +} + +// OldSoldTo returns the old "sold_to" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldSoldTo(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldSoldTo is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldSoldTo requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldSoldTo: %w", err) + } + return oldValue.SoldTo, nil +} + +// ClearSoldTo clears the value of the "sold_to" field. +func (m *ItemMutation) ClearSoldTo() { + m.sold_to = nil + m.clearedFields[item.FieldSoldTo] = struct{}{} +} + +// SoldToCleared returns if the "sold_to" field was cleared in this mutation. +func (m *ItemMutation) SoldToCleared() bool { + _, ok := m.clearedFields[item.FieldSoldTo] + return ok +} + +// ResetSoldTo resets all changes to the "sold_to" field. +func (m *ItemMutation) ResetSoldTo() { + m.sold_to = nil + delete(m.clearedFields, item.FieldSoldTo) +} + +// SetSoldPrice sets the "sold_price" field. +func (m *ItemMutation) SetSoldPrice(f float64) { + m.sold_price = &f + m.addsold_price = nil +} + +// SoldPrice returns the value of the "sold_price" field in the mutation. +func (m *ItemMutation) SoldPrice() (r float64, exists bool) { + v := m.sold_price + if v == nil { + return + } + return *v, true +} + +// OldSoldPrice returns the old "sold_price" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldSoldPrice(ctx context.Context) (v float64, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldSoldPrice is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldSoldPrice requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldSoldPrice: %w", err) + } + return oldValue.SoldPrice, nil +} + +// AddSoldPrice adds f to the "sold_price" field. +func (m *ItemMutation) AddSoldPrice(f float64) { + if m.addsold_price != nil { + *m.addsold_price += f + } else { + m.addsold_price = &f + } +} + +// AddedSoldPrice returns the value that was added to the "sold_price" field in this mutation. +func (m *ItemMutation) AddedSoldPrice() (r float64, exists bool) { + v := m.addsold_price + if v == nil { + return + } + return *v, true +} + +// ResetSoldPrice resets all changes to the "sold_price" field. +func (m *ItemMutation) ResetSoldPrice() { + m.sold_price = nil + m.addsold_price = nil +} + +// SetSoldReceiptID sets the "sold_receipt_id" field. +func (m *ItemMutation) SetSoldReceiptID(u uuid.UUID) { + m.sold_receipt_id = &u +} + +// SoldReceiptID returns the value of the "sold_receipt_id" field in the mutation. +func (m *ItemMutation) SoldReceiptID() (r uuid.UUID, exists bool) { + v := m.sold_receipt_id + if v == nil { + return + } + return *v, true +} + +// OldSoldReceiptID returns the old "sold_receipt_id" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldSoldReceiptID(ctx context.Context) (v uuid.UUID, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldSoldReceiptID is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldSoldReceiptID requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldSoldReceiptID: %w", err) + } + return oldValue.SoldReceiptID, nil +} + +// ClearSoldReceiptID clears the value of the "sold_receipt_id" field. +func (m *ItemMutation) ClearSoldReceiptID() { + m.sold_receipt_id = nil + m.clearedFields[item.FieldSoldReceiptID] = struct{}{} +} + +// SoldReceiptIDCleared returns if the "sold_receipt_id" field was cleared in this mutation. +func (m *ItemMutation) SoldReceiptIDCleared() bool { + _, ok := m.clearedFields[item.FieldSoldReceiptID] + return ok +} + +// ResetSoldReceiptID resets all changes to the "sold_receipt_id" field. +func (m *ItemMutation) ResetSoldReceiptID() { + m.sold_receipt_id = nil + delete(m.clearedFields, item.FieldSoldReceiptID) +} + +// SetSoldNotes sets the "sold_notes" field. +func (m *ItemMutation) SetSoldNotes(s string) { + m.sold_notes = &s +} + +// SoldNotes returns the value of the "sold_notes" field in the mutation. +func (m *ItemMutation) SoldNotes() (r string, exists bool) { + v := m.sold_notes + if v == nil { + return + } + return *v, true +} + +// OldSoldNotes returns the old "sold_notes" field's value of the Item entity. +// If the Item object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemMutation) OldSoldNotes(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldSoldNotes is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldSoldNotes requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldSoldNotes: %w", err) + } + return oldValue.SoldNotes, nil +} + +// ClearSoldNotes clears the value of the "sold_notes" field. +func (m *ItemMutation) ClearSoldNotes() { + m.sold_notes = nil + m.clearedFields[item.FieldSoldNotes] = struct{}{} +} + +// SoldNotesCleared returns if the "sold_notes" field was cleared in this mutation. +func (m *ItemMutation) SoldNotesCleared() bool { + _, ok := m.clearedFields[item.FieldSoldNotes] + return ok +} + +// ResetSoldNotes resets all changes to the "sold_notes" field. +func (m *ItemMutation) ResetSoldNotes() { + m.sold_notes = nil + delete(m.clearedFields, item.FieldSoldNotes) +} + +// SetGroupID sets the "group" edge to the Group entity by id. +func (m *ItemMutation) SetGroupID(id uuid.UUID) { + m.group = &id +} + +// ClearGroup clears the "group" edge to the Group entity. +func (m *ItemMutation) ClearGroup() { + m.clearedgroup = true +} + +// GroupCleared reports if the "group" edge to the Group entity was cleared. +func (m *ItemMutation) GroupCleared() bool { + return m.clearedgroup +} + +// GroupID returns the "group" edge ID in the mutation. +func (m *ItemMutation) GroupID() (id uuid.UUID, exists bool) { + if m.group != nil { + return *m.group, true + } + return +} + +// GroupIDs returns the "group" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// GroupID instead. It exists only for internal usage by the builders. 
+func (m *ItemMutation) GroupIDs() (ids []uuid.UUID) { + if id := m.group; id != nil { + ids = append(ids, *id) + } + return +} + +// ResetGroup resets all changes to the "group" edge. +func (m *ItemMutation) ResetGroup() { + m.group = nil + m.clearedgroup = false +} + +// SetLocationID sets the "location" edge to the Location entity by id. +func (m *ItemMutation) SetLocationID(id uuid.UUID) { + m.location = &id +} + +// ClearLocation clears the "location" edge to the Location entity. +func (m *ItemMutation) ClearLocation() { + m.clearedlocation = true +} + +// LocationCleared reports if the "location" edge to the Location entity was cleared. +func (m *ItemMutation) LocationCleared() bool { + return m.clearedlocation +} + +// LocationID returns the "location" edge ID in the mutation. +func (m *ItemMutation) LocationID() (id uuid.UUID, exists bool) { + if m.location != nil { + return *m.location, true + } + return +} + +// LocationIDs returns the "location" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// LocationID instead. It exists only for internal usage by the builders. +func (m *ItemMutation) LocationIDs() (ids []uuid.UUID) { + if id := m.location; id != nil { + ids = append(ids, *id) + } + return +} + +// ResetLocation resets all changes to the "location" edge. +func (m *ItemMutation) ResetLocation() { + m.location = nil + m.clearedlocation = false +} + +// AddFieldIDs adds the "fields" edge to the ItemField entity by ids. +func (m *ItemMutation) AddFieldIDs(ids ...uuid.UUID) { + if m.fields == nil { + m.fields = make(map[uuid.UUID]struct{}) + } + for i := range ids { + m.fields[ids[i]] = struct{}{} + } +} + +// ClearFields clears the "fields" edge to the ItemField entity. +func (m *ItemMutation) ClearFields() { + m.clearedfields = true +} + +// FieldsCleared reports if the "fields" edge to the ItemField entity was cleared. 
+func (m *ItemMutation) FieldsCleared() bool { + return m.clearedfields +} + +// RemoveFieldIDs removes the "fields" edge to the ItemField entity by IDs. +func (m *ItemMutation) RemoveFieldIDs(ids ...uuid.UUID) { + if m.removedfields == nil { + m.removedfields = make(map[uuid.UUID]struct{}) + } + for i := range ids { + delete(m.fields, ids[i]) + m.removedfields[ids[i]] = struct{}{} + } +} + +// RemovedFields returns the removed IDs of the "fields" edge to the ItemField entity. +func (m *ItemMutation) RemovedFieldsIDs() (ids []uuid.UUID) { + for id := range m.removedfields { + ids = append(ids, id) + } + return +} + +// FieldsIDs returns the "fields" edge IDs in the mutation. +func (m *ItemMutation) FieldsIDs() (ids []uuid.UUID) { + for id := range m.fields { + ids = append(ids, id) + } + return +} + +// ResetFields resets all changes to the "fields" edge. +func (m *ItemMutation) ResetFields() { + m.fields = nil + m.clearedfields = false + m.removedfields = nil +} + +// AddLabelIDs adds the "label" edge to the Label entity by ids. +func (m *ItemMutation) AddLabelIDs(ids ...uuid.UUID) { + if m.label == nil { + m.label = make(map[uuid.UUID]struct{}) + } + for i := range ids { + m.label[ids[i]] = struct{}{} + } +} + +// ClearLabel clears the "label" edge to the Label entity. +func (m *ItemMutation) ClearLabel() { + m.clearedlabel = true +} + +// LabelCleared reports if the "label" edge to the Label entity was cleared. +func (m *ItemMutation) LabelCleared() bool { + return m.clearedlabel +} + +// RemoveLabelIDs removes the "label" edge to the Label entity by IDs. +func (m *ItemMutation) RemoveLabelIDs(ids ...uuid.UUID) { + if m.removedlabel == nil { + m.removedlabel = make(map[uuid.UUID]struct{}) + } + for i := range ids { + delete(m.label, ids[i]) + m.removedlabel[ids[i]] = struct{}{} + } +} + +// RemovedLabel returns the removed IDs of the "label" edge to the Label entity. 
+func (m *ItemMutation) RemovedLabelIDs() (ids []uuid.UUID) { + for id := range m.removedlabel { + ids = append(ids, id) + } + return +} + +// LabelIDs returns the "label" edge IDs in the mutation. +func (m *ItemMutation) LabelIDs() (ids []uuid.UUID) { + for id := range m.label { + ids = append(ids, id) + } + return +} + +// ResetLabel resets all changes to the "label" edge. +func (m *ItemMutation) ResetLabel() { + m.label = nil + m.clearedlabel = false + m.removedlabel = nil +} + +// Where appends a list predicates to the ItemMutation builder. +func (m *ItemMutation) Where(ps ...predicate.Item) { + m.predicates = append(m.predicates, ps...) +} + +// Op returns the operation name. +func (m *ItemMutation) Op() Op { + return m.op +} + +// Type returns the node type of this mutation (Item). +func (m *ItemMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). 
+func (m *ItemMutation) Fields() []string { + fields := make([]string, 0, 17) + if m.created_at != nil { + fields = append(fields, item.FieldCreatedAt) + } + if m.updated_at != nil { + fields = append(fields, item.FieldUpdatedAt) + } + if m.name != nil { + fields = append(fields, item.FieldName) + } + if m.description != nil { + fields = append(fields, item.FieldDescription) + } + if m.notes != nil { + fields = append(fields, item.FieldNotes) + } + if m.serial_number != nil { + fields = append(fields, item.FieldSerialNumber) + } + if m.model_number != nil { + fields = append(fields, item.FieldModelNumber) + } + if m.manufacturer != nil { + fields = append(fields, item.FieldManufacturer) + } + if m.purchase_time != nil { + fields = append(fields, item.FieldPurchaseTime) + } + if m.purchase_from != nil { + fields = append(fields, item.FieldPurchaseFrom) + } + if m.purchase_price != nil { + fields = append(fields, item.FieldPurchasePrice) + } + if m.purchase_receipt_id != nil { + fields = append(fields, item.FieldPurchaseReceiptID) + } + if m.sold_time != nil { + fields = append(fields, item.FieldSoldTime) + } + if m.sold_to != nil { + fields = append(fields, item.FieldSoldTo) + } + if m.sold_price != nil { + fields = append(fields, item.FieldSoldPrice) + } + if m.sold_receipt_id != nil { + fields = append(fields, item.FieldSoldReceiptID) + } + if m.sold_notes != nil { + fields = append(fields, item.FieldSoldNotes) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. 
+func (m *ItemMutation) Field(name string) (ent.Value, bool) { + switch name { + case item.FieldCreatedAt: + return m.CreatedAt() + case item.FieldUpdatedAt: + return m.UpdatedAt() + case item.FieldName: + return m.Name() + case item.FieldDescription: + return m.Description() + case item.FieldNotes: + return m.Notes() + case item.FieldSerialNumber: + return m.SerialNumber() + case item.FieldModelNumber: + return m.ModelNumber() + case item.FieldManufacturer: + return m.Manufacturer() + case item.FieldPurchaseTime: + return m.PurchaseTime() + case item.FieldPurchaseFrom: + return m.PurchaseFrom() + case item.FieldPurchasePrice: + return m.PurchasePrice() + case item.FieldPurchaseReceiptID: + return m.PurchaseReceiptID() + case item.FieldSoldTime: + return m.SoldTime() + case item.FieldSoldTo: + return m.SoldTo() + case item.FieldSoldPrice: + return m.SoldPrice() + case item.FieldSoldReceiptID: + return m.SoldReceiptID() + case item.FieldSoldNotes: + return m.SoldNotes() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. 
+func (m *ItemMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case item.FieldCreatedAt: + return m.OldCreatedAt(ctx) + case item.FieldUpdatedAt: + return m.OldUpdatedAt(ctx) + case item.FieldName: + return m.OldName(ctx) + case item.FieldDescription: + return m.OldDescription(ctx) + case item.FieldNotes: + return m.OldNotes(ctx) + case item.FieldSerialNumber: + return m.OldSerialNumber(ctx) + case item.FieldModelNumber: + return m.OldModelNumber(ctx) + case item.FieldManufacturer: + return m.OldManufacturer(ctx) + case item.FieldPurchaseTime: + return m.OldPurchaseTime(ctx) + case item.FieldPurchaseFrom: + return m.OldPurchaseFrom(ctx) + case item.FieldPurchasePrice: + return m.OldPurchasePrice(ctx) + case item.FieldPurchaseReceiptID: + return m.OldPurchaseReceiptID(ctx) + case item.FieldSoldTime: + return m.OldSoldTime(ctx) + case item.FieldSoldTo: + return m.OldSoldTo(ctx) + case item.FieldSoldPrice: + return m.OldSoldPrice(ctx) + case item.FieldSoldReceiptID: + return m.OldSoldReceiptID(ctx) + case item.FieldSoldNotes: + return m.OldSoldNotes(ctx) + } + return nil, fmt.Errorf("unknown Item field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *ItemMutation) SetField(name string, value ent.Value) error { + switch name { + case item.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case item.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil + case item.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case item.FieldDescription: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetDescription(v) + return nil + case item.FieldNotes: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetNotes(v) + return nil + case item.FieldSerialNumber: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetSerialNumber(v) + return nil + case item.FieldModelNumber: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetModelNumber(v) + return nil + case item.FieldManufacturer: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetManufacturer(v) + return nil + case item.FieldPurchaseTime: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPurchaseTime(v) + return nil + case item.FieldPurchaseFrom: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPurchaseFrom(v) + return nil + case item.FieldPurchasePrice: + v, ok := value.(float64) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPurchasePrice(v) + return nil + case 
item.FieldPurchaseReceiptID: + v, ok := value.(uuid.UUID) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPurchaseReceiptID(v) + return nil + case item.FieldSoldTime: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetSoldTime(v) + return nil + case item.FieldSoldTo: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetSoldTo(v) + return nil + case item.FieldSoldPrice: + v, ok := value.(float64) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetSoldPrice(v) + return nil + case item.FieldSoldReceiptID: + v, ok := value.(uuid.UUID) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetSoldReceiptID(v) + return nil + case item.FieldSoldNotes: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetSoldNotes(v) + return nil + } + return fmt.Errorf("unknown Item field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *ItemMutation) AddedFields() []string { + var fields []string + if m.addpurchase_price != nil { + fields = append(fields, item.FieldPurchasePrice) + } + if m.addsold_price != nil { + fields = append(fields, item.FieldSoldPrice) + } + return fields +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *ItemMutation) AddedField(name string) (ent.Value, bool) { + switch name { + case item.FieldPurchasePrice: + return m.AddedPurchasePrice() + case item.FieldSoldPrice: + return m.AddedSoldPrice() + } + return nil, false +} + +// AddField adds the value to the field with the given name. 
It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *ItemMutation) AddField(name string, value ent.Value) error { + switch name { + case item.FieldPurchasePrice: + v, ok := value.(float64) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.AddPurchasePrice(v) + return nil + case item.FieldSoldPrice: + v, ok := value.(float64) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.AddSoldPrice(v) + return nil + } + return fmt.Errorf("unknown Item numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *ItemMutation) ClearedFields() []string { + var fields []string + if m.FieldCleared(item.FieldDescription) { + fields = append(fields, item.FieldDescription) + } + if m.FieldCleared(item.FieldNotes) { + fields = append(fields, item.FieldNotes) + } + if m.FieldCleared(item.FieldSerialNumber) { + fields = append(fields, item.FieldSerialNumber) + } + if m.FieldCleared(item.FieldModelNumber) { + fields = append(fields, item.FieldModelNumber) + } + if m.FieldCleared(item.FieldManufacturer) { + fields = append(fields, item.FieldManufacturer) + } + if m.FieldCleared(item.FieldPurchaseTime) { + fields = append(fields, item.FieldPurchaseTime) + } + if m.FieldCleared(item.FieldPurchaseFrom) { + fields = append(fields, item.FieldPurchaseFrom) + } + if m.FieldCleared(item.FieldPurchaseReceiptID) { + fields = append(fields, item.FieldPurchaseReceiptID) + } + if m.FieldCleared(item.FieldSoldTime) { + fields = append(fields, item.FieldSoldTime) + } + if m.FieldCleared(item.FieldSoldTo) { + fields = append(fields, item.FieldSoldTo) + } + if m.FieldCleared(item.FieldSoldReceiptID) { + fields = append(fields, item.FieldSoldReceiptID) + } + if m.FieldCleared(item.FieldSoldNotes) { + fields = append(fields, item.FieldSoldNotes) + } + return fields +} + +// FieldCleared returns a 
boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *ItemMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *ItemMutation) ClearField(name string) error { + switch name { + case item.FieldDescription: + m.ClearDescription() + return nil + case item.FieldNotes: + m.ClearNotes() + return nil + case item.FieldSerialNumber: + m.ClearSerialNumber() + return nil + case item.FieldModelNumber: + m.ClearModelNumber() + return nil + case item.FieldManufacturer: + m.ClearManufacturer() + return nil + case item.FieldPurchaseTime: + m.ClearPurchaseTime() + return nil + case item.FieldPurchaseFrom: + m.ClearPurchaseFrom() + return nil + case item.FieldPurchaseReceiptID: + m.ClearPurchaseReceiptID() + return nil + case item.FieldSoldTime: + m.ClearSoldTime() + return nil + case item.FieldSoldTo: + m.ClearSoldTo() + return nil + case item.FieldSoldReceiptID: + m.ClearSoldReceiptID() + return nil + case item.FieldSoldNotes: + m.ClearSoldNotes() + return nil + } + return fmt.Errorf("unknown Item nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. 
+func (m *ItemMutation) ResetField(name string) error { + switch name { + case item.FieldCreatedAt: + m.ResetCreatedAt() + return nil + case item.FieldUpdatedAt: + m.ResetUpdatedAt() + return nil + case item.FieldName: + m.ResetName() + return nil + case item.FieldDescription: + m.ResetDescription() + return nil + case item.FieldNotes: + m.ResetNotes() + return nil + case item.FieldSerialNumber: + m.ResetSerialNumber() + return nil + case item.FieldModelNumber: + m.ResetModelNumber() + return nil + case item.FieldManufacturer: + m.ResetManufacturer() + return nil + case item.FieldPurchaseTime: + m.ResetPurchaseTime() + return nil + case item.FieldPurchaseFrom: + m.ResetPurchaseFrom() + return nil + case item.FieldPurchasePrice: + m.ResetPurchasePrice() + return nil + case item.FieldPurchaseReceiptID: + m.ResetPurchaseReceiptID() + return nil + case item.FieldSoldTime: + m.ResetSoldTime() + return nil + case item.FieldSoldTo: + m.ResetSoldTo() + return nil + case item.FieldSoldPrice: + m.ResetSoldPrice() + return nil + case item.FieldSoldReceiptID: + m.ResetSoldReceiptID() + return nil + case item.FieldSoldNotes: + m.ResetSoldNotes() + return nil + } + return fmt.Errorf("unknown Item field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *ItemMutation) AddedEdges() []string { + edges := make([]string, 0, 4) + if m.group != nil { + edges = append(edges, item.EdgeGroup) + } + if m.location != nil { + edges = append(edges, item.EdgeLocation) + } + if m.fields != nil { + edges = append(edges, item.EdgeFields) + } + if m.label != nil { + edges = append(edges, item.EdgeLabel) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. 
+func (m *ItemMutation) AddedIDs(name string) []ent.Value { + switch name { + case item.EdgeGroup: + if id := m.group; id != nil { + return []ent.Value{*id} + } + case item.EdgeLocation: + if id := m.location; id != nil { + return []ent.Value{*id} + } + case item.EdgeFields: + ids := make([]ent.Value, 0, len(m.fields)) + for id := range m.fields { + ids = append(ids, id) + } + return ids + case item.EdgeLabel: + ids := make([]ent.Value, 0, len(m.label)) + for id := range m.label { + ids = append(ids, id) + } + return ids + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *ItemMutation) RemovedEdges() []string { + edges := make([]string, 0, 4) + if m.removedfields != nil { + edges = append(edges, item.EdgeFields) + } + if m.removedlabel != nil { + edges = append(edges, item.EdgeLabel) + } + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *ItemMutation) RemovedIDs(name string) []ent.Value { + switch name { + case item.EdgeFields: + ids := make([]ent.Value, 0, len(m.removedfields)) + for id := range m.removedfields { + ids = append(ids, id) + } + return ids + case item.EdgeLabel: + ids := make([]ent.Value, 0, len(m.removedlabel)) + for id := range m.removedlabel { + ids = append(ids, id) + } + return ids + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *ItemMutation) ClearedEdges() []string { + edges := make([]string, 0, 4) + if m.clearedgroup { + edges = append(edges, item.EdgeGroup) + } + if m.clearedlocation { + edges = append(edges, item.EdgeLocation) + } + if m.clearedfields { + edges = append(edges, item.EdgeFields) + } + if m.clearedlabel { + edges = append(edges, item.EdgeLabel) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. 
+func (m *ItemMutation) EdgeCleared(name string) bool { + switch name { + case item.EdgeGroup: + return m.clearedgroup + case item.EdgeLocation: + return m.clearedlocation + case item.EdgeFields: + return m.clearedfields + case item.EdgeLabel: + return m.clearedlabel + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *ItemMutation) ClearEdge(name string) error { + switch name { + case item.EdgeGroup: + m.ClearGroup() + return nil + case item.EdgeLocation: + m.ClearLocation() + return nil + } + return fmt.Errorf("unknown Item unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *ItemMutation) ResetEdge(name string) error { + switch name { + case item.EdgeGroup: + m.ResetGroup() + return nil + case item.EdgeLocation: + m.ResetLocation() + return nil + case item.EdgeFields: + m.ResetFields() + return nil + case item.EdgeLabel: + m.ResetLabel() + return nil + } + return fmt.Errorf("unknown Item edge %s", name) +} + +// ItemFieldMutation represents an operation that mutates the ItemField nodes in the graph. +type ItemFieldMutation struct { + config + op Op + typ string + id *uuid.UUID + created_at *time.Time + updated_at *time.Time + name *string + description *string + _type *itemfield.Type + text_value *string + number_value *int + addnumber_value *int + boolean_value *bool + time_value *time.Time + clearedFields map[string]struct{} + item *uuid.UUID + cleareditem bool + done bool + oldValue func(context.Context) (*ItemField, error) + predicates []predicate.ItemField +} + +var _ ent.Mutation = (*ItemFieldMutation)(nil) + +// itemfieldOption allows management of the mutation configuration using functional options. 
+type itemfieldOption func(*ItemFieldMutation) + +// newItemFieldMutation creates new mutation for the ItemField entity. +func newItemFieldMutation(c config, op Op, opts ...itemfieldOption) *ItemFieldMutation { + m := &ItemFieldMutation{ + config: c, + op: op, + typ: TypeItemField, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withItemFieldID sets the ID field of the mutation. +func withItemFieldID(id uuid.UUID) itemfieldOption { + return func(m *ItemFieldMutation) { + var ( + err error + once sync.Once + value *ItemField + ) + m.oldValue = func(ctx context.Context) (*ItemField, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().ItemField.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withItemField sets the old ItemField of the mutation. +func withItemField(node *ItemField) itemfieldOption { + return func(m *ItemFieldMutation) { + m.oldValue = func(context.Context) (*ItemField, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m ItemFieldMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m ItemFieldMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of ItemField entities. +func (m *ItemFieldMutation) SetID(id uuid.UUID) { + m.id = &id +} + +// ID returns the ID value in the mutation. 
Note that the ID is only available
+// if it was provided to the builder or after it was returned from the database.
+func (m *ItemFieldMutation) ID() (id uuid.UUID, exists bool) {
+	if m.id == nil {
+		return
+	}
+	return *m.id, true
+}
+
+// IDs queries the database and returns the entity ids that match the mutation's predicate.
+// That means, if the mutation is applied within a transaction with an isolation level such
+// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
+// or deleted by the mutation.
+func (m *ItemFieldMutation) IDs(ctx context.Context) ([]uuid.UUID, error) {
+	switch {
+	case m.op.Is(OpUpdateOne | OpDeleteOne):
+		id, exists := m.ID()
+		if exists {
+			return []uuid.UUID{id}, nil
+		}
+		fallthrough
+	case m.op.Is(OpUpdate | OpDelete):
+		return m.Client().ItemField.Query().Where(m.predicates...).IDs(ctx)
+	default:
+		return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
+	}
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (m *ItemFieldMutation) SetCreatedAt(t time.Time) {
+	m.created_at = &t
+}
+
+// CreatedAt returns the value of the "created_at" field in the mutation.
+func (m *ItemFieldMutation) CreatedAt() (r time.Time, exists bool) {
+	v := m.created_at
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldCreatedAt returns the old "created_at" field's value of the ItemField entity.
+// If the ItemField object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *ItemFieldMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *ItemFieldMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUpdatedAt sets the "updated_at" field. +func (m *ItemFieldMutation) SetUpdatedAt(t time.Time) { + m.updated_at = &t +} + +// UpdatedAt returns the value of the "updated_at" field in the mutation. +func (m *ItemFieldMutation) UpdatedAt() (r time.Time, exists bool) { + v := m.updated_at + if v == nil { + return + } + return *v, true +} + +// OldUpdatedAt returns the old "updated_at" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemFieldMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUpdatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) + } + return oldValue.UpdatedAt, nil +} + +// ResetUpdatedAt resets all changes to the "updated_at" field. +func (m *ItemFieldMutation) ResetUpdatedAt() { + m.updated_at = nil +} + +// SetName sets the "name" field. 
+func (m *ItemFieldMutation) SetName(s string) { + m.name = &s +} + +// Name returns the value of the "name" field in the mutation. +func (m *ItemFieldMutation) Name() (r string, exists bool) { + v := m.name + if v == nil { + return + } + return *v, true +} + +// OldName returns the old "name" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemFieldMutation) OldName(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldName is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldName requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldName: %w", err) + } + return oldValue.Name, nil +} + +// ResetName resets all changes to the "name" field. +func (m *ItemFieldMutation) ResetName() { + m.name = nil +} + +// SetDescription sets the "description" field. +func (m *ItemFieldMutation) SetDescription(s string) { + m.description = &s +} + +// Description returns the value of the "description" field in the mutation. +func (m *ItemFieldMutation) Description() (r string, exists bool) { + v := m.description + if v == nil { + return + } + return *v, true +} + +// OldDescription returns the old "description" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemFieldMutation) OldDescription(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldDescription is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldDescription requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldDescription: %w", err) + } + return oldValue.Description, nil +} + +// ClearDescription clears the value of the "description" field. +func (m *ItemFieldMutation) ClearDescription() { + m.description = nil + m.clearedFields[itemfield.FieldDescription] = struct{}{} +} + +// DescriptionCleared returns if the "description" field was cleared in this mutation. +func (m *ItemFieldMutation) DescriptionCleared() bool { + _, ok := m.clearedFields[itemfield.FieldDescription] + return ok +} + +// ResetDescription resets all changes to the "description" field. +func (m *ItemFieldMutation) ResetDescription() { + m.description = nil + delete(m.clearedFields, itemfield.FieldDescription) +} + +// SetType sets the "type" field. +func (m *ItemFieldMutation) SetType(i itemfield.Type) { + m._type = &i +} + +// GetType returns the value of the "type" field in the mutation. +func (m *ItemFieldMutation) GetType() (r itemfield.Type, exists bool) { + v := m._type + if v == nil { + return + } + return *v, true +} + +// OldType returns the old "type" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *ItemFieldMutation) OldType(ctx context.Context) (v itemfield.Type, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldType is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldType requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldType: %w", err) + } + return oldValue.Type, nil +} + +// ResetType resets all changes to the "type" field. +func (m *ItemFieldMutation) ResetType() { + m._type = nil +} + +// SetTextValue sets the "text_value" field. +func (m *ItemFieldMutation) SetTextValue(s string) { + m.text_value = &s +} + +// TextValue returns the value of the "text_value" field in the mutation. +func (m *ItemFieldMutation) TextValue() (r string, exists bool) { + v := m.text_value + if v == nil { + return + } + return *v, true +} + +// OldTextValue returns the old "text_value" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemFieldMutation) OldTextValue(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldTextValue is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldTextValue requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldTextValue: %w", err) + } + return oldValue.TextValue, nil +} + +// ClearTextValue clears the value of the "text_value" field. +func (m *ItemFieldMutation) ClearTextValue() { + m.text_value = nil + m.clearedFields[itemfield.FieldTextValue] = struct{}{} +} + +// TextValueCleared returns if the "text_value" field was cleared in this mutation. 
+func (m *ItemFieldMutation) TextValueCleared() bool { + _, ok := m.clearedFields[itemfield.FieldTextValue] + return ok +} + +// ResetTextValue resets all changes to the "text_value" field. +func (m *ItemFieldMutation) ResetTextValue() { + m.text_value = nil + delete(m.clearedFields, itemfield.FieldTextValue) +} + +// SetNumberValue sets the "number_value" field. +func (m *ItemFieldMutation) SetNumberValue(i int) { + m.number_value = &i + m.addnumber_value = nil +} + +// NumberValue returns the value of the "number_value" field in the mutation. +func (m *ItemFieldMutation) NumberValue() (r int, exists bool) { + v := m.number_value + if v == nil { + return + } + return *v, true +} + +// OldNumberValue returns the old "number_value" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemFieldMutation) OldNumberValue(ctx context.Context) (v int, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldNumberValue is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldNumberValue requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldNumberValue: %w", err) + } + return oldValue.NumberValue, nil +} + +// AddNumberValue adds i to the "number_value" field. +func (m *ItemFieldMutation) AddNumberValue(i int) { + if m.addnumber_value != nil { + *m.addnumber_value += i + } else { + m.addnumber_value = &i + } +} + +// AddedNumberValue returns the value that was added to the "number_value" field in this mutation. +func (m *ItemFieldMutation) AddedNumberValue() (r int, exists bool) { + v := m.addnumber_value + if v == nil { + return + } + return *v, true +} + +// ClearNumberValue clears the value of the "number_value" field. 
+func (m *ItemFieldMutation) ClearNumberValue() { + m.number_value = nil + m.addnumber_value = nil + m.clearedFields[itemfield.FieldNumberValue] = struct{}{} +} + +// NumberValueCleared returns if the "number_value" field was cleared in this mutation. +func (m *ItemFieldMutation) NumberValueCleared() bool { + _, ok := m.clearedFields[itemfield.FieldNumberValue] + return ok +} + +// ResetNumberValue resets all changes to the "number_value" field. +func (m *ItemFieldMutation) ResetNumberValue() { + m.number_value = nil + m.addnumber_value = nil + delete(m.clearedFields, itemfield.FieldNumberValue) +} + +// SetBooleanValue sets the "boolean_value" field. +func (m *ItemFieldMutation) SetBooleanValue(b bool) { + m.boolean_value = &b +} + +// BooleanValue returns the value of the "boolean_value" field in the mutation. +func (m *ItemFieldMutation) BooleanValue() (r bool, exists bool) { + v := m.boolean_value + if v == nil { + return + } + return *v, true +} + +// OldBooleanValue returns the old "boolean_value" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemFieldMutation) OldBooleanValue(ctx context.Context) (v bool, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldBooleanValue is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldBooleanValue requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldBooleanValue: %w", err) + } + return oldValue.BooleanValue, nil +} + +// ResetBooleanValue resets all changes to the "boolean_value" field. +func (m *ItemFieldMutation) ResetBooleanValue() { + m.boolean_value = nil +} + +// SetTimeValue sets the "time_value" field. 
+func (m *ItemFieldMutation) SetTimeValue(t time.Time) { + m.time_value = &t +} + +// TimeValue returns the value of the "time_value" field in the mutation. +func (m *ItemFieldMutation) TimeValue() (r time.Time, exists bool) { + v := m.time_value + if v == nil { + return + } + return *v, true +} + +// OldTimeValue returns the old "time_value" field's value of the ItemField entity. +// If the ItemField object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *ItemFieldMutation) OldTimeValue(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldTimeValue is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldTimeValue requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldTimeValue: %w", err) + } + return oldValue.TimeValue, nil +} + +// ResetTimeValue resets all changes to the "time_value" field. +func (m *ItemFieldMutation) ResetTimeValue() { + m.time_value = nil +} + +// SetItemID sets the "item" edge to the Item entity by id. +func (m *ItemFieldMutation) SetItemID(id uuid.UUID) { + m.item = &id +} + +// ClearItem clears the "item" edge to the Item entity. +func (m *ItemFieldMutation) ClearItem() { + m.cleareditem = true +} + +// ItemCleared reports if the "item" edge to the Item entity was cleared. +func (m *ItemFieldMutation) ItemCleared() bool { + return m.cleareditem +} + +// ItemID returns the "item" edge ID in the mutation. +func (m *ItemFieldMutation) ItemID() (id uuid.UUID, exists bool) { + if m.item != nil { + return *m.item, true + } + return +} + +// ItemIDs returns the "item" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ItemID instead. 
It exists only for internal usage by the builders.
+func (m *ItemFieldMutation) ItemIDs() (ids []uuid.UUID) {
+	if id := m.item; id != nil {
+		ids = append(ids, *id)
+	}
+	return
+}
+
+// ResetItem resets all changes to the "item" edge.
+func (m *ItemFieldMutation) ResetItem() {
+	m.item = nil
+	m.cleareditem = false
+}
+
+// Where appends a list of predicates to the ItemFieldMutation builder.
+func (m *ItemFieldMutation) Where(ps ...predicate.ItemField) {
+	m.predicates = append(m.predicates, ps...)
+}
+
+// Op returns the operation name.
+func (m *ItemFieldMutation) Op() Op {
+	return m.op
+}
+
+// Type returns the node type of this mutation (ItemField).
+func (m *ItemFieldMutation) Type() string {
+	return m.typ
+}
+
+// Fields returns all fields that were changed during this mutation. Note that in
+// order to get all numeric fields that were incremented/decremented, call
+// AddedFields().
+func (m *ItemFieldMutation) Fields() []string {
+	fields := make([]string, 0, 9)
+	if m.created_at != nil {
+		fields = append(fields, itemfield.FieldCreatedAt)
+	}
+	if m.updated_at != nil {
+		fields = append(fields, itemfield.FieldUpdatedAt)
+	}
+	if m.name != nil {
+		fields = append(fields, itemfield.FieldName)
+	}
+	if m.description != nil {
+		fields = append(fields, itemfield.FieldDescription)
+	}
+	if m._type != nil {
+		fields = append(fields, itemfield.FieldType)
+	}
+	if m.text_value != nil {
+		fields = append(fields, itemfield.FieldTextValue)
+	}
+	if m.number_value != nil {
+		fields = append(fields, itemfield.FieldNumberValue)
+	}
+	if m.boolean_value != nil {
+		fields = append(fields, itemfield.FieldBooleanValue)
+	}
+	if m.time_value != nil {
+		fields = append(fields, itemfield.FieldTimeValue)
+	}
+	return fields
+}
+
+// Field returns the value of a field with the given name. The second boolean
+// return value indicates that this field was not set, or was not defined in the
+// schema.
+func (m *ItemFieldMutation) Field(name string) (ent.Value, bool) { + switch name { + case itemfield.FieldCreatedAt: + return m.CreatedAt() + case itemfield.FieldUpdatedAt: + return m.UpdatedAt() + case itemfield.FieldName: + return m.Name() + case itemfield.FieldDescription: + return m.Description() + case itemfield.FieldType: + return m.GetType() + case itemfield.FieldTextValue: + return m.TextValue() + case itemfield.FieldNumberValue: + return m.NumberValue() + case itemfield.FieldBooleanValue: + return m.BooleanValue() + case itemfield.FieldTimeValue: + return m.TimeValue() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *ItemFieldMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case itemfield.FieldCreatedAt: + return m.OldCreatedAt(ctx) + case itemfield.FieldUpdatedAt: + return m.OldUpdatedAt(ctx) + case itemfield.FieldName: + return m.OldName(ctx) + case itemfield.FieldDescription: + return m.OldDescription(ctx) + case itemfield.FieldType: + return m.OldType(ctx) + case itemfield.FieldTextValue: + return m.OldTextValue(ctx) + case itemfield.FieldNumberValue: + return m.OldNumberValue(ctx) + case itemfield.FieldBooleanValue: + return m.OldBooleanValue(ctx) + case itemfield.FieldTimeValue: + return m.OldTimeValue(ctx) + } + return nil, fmt.Errorf("unknown ItemField field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *ItemFieldMutation) SetField(name string, value ent.Value) error { + switch name { + case itemfield.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case itemfield.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil + case itemfield.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case itemfield.FieldDescription: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetDescription(v) + return nil + case itemfield.FieldType: + v, ok := value.(itemfield.Type) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetType(v) + return nil + case itemfield.FieldTextValue: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetTextValue(v) + return nil + case itemfield.FieldNumberValue: + v, ok := value.(int) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetNumberValue(v) + return nil + case itemfield.FieldBooleanValue: + v, ok := value.(bool) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetBooleanValue(v) + return nil + case itemfield.FieldTimeValue: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetTimeValue(v) + return nil + } + return fmt.Errorf("unknown ItemField field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. 
// NOTE(review): this code appears to be generated by entc (ent codegen);
// prefer changing the ent schema and regenerating over editing by hand.
func (m *ItemFieldMutation) AddedFields() []string {
	var fields []string
	if m.addnumber_value != nil {
		fields = append(fields, itemfield.FieldNumberValue)
	}
	return fields
}

// AddedField returns the numeric value that was incremented/decremented on a field
// with the given name. The second boolean return value indicates that this field
// was not set, or was not defined in the schema.
func (m *ItemFieldMutation) AddedField(name string) (ent.Value, bool) {
	switch name {
	case itemfield.FieldNumberValue:
		return m.AddedNumberValue()
	}
	return nil, false
}

// AddField adds the value to the field with the given name. It returns an error if
// the field is not defined in the schema, or if the type mismatched the field
// type.
func (m *ItemFieldMutation) AddField(name string, value ent.Value) error {
	switch name {
	case itemfield.FieldNumberValue:
		v, ok := value.(int)
		if !ok {
			return fmt.Errorf("unexpected type %T for field %s", value, name)
		}
		m.AddNumberValue(v)
		return nil
	}
	return fmt.Errorf("unknown ItemField numeric field %s", name)
}

// ClearedFields returns all nullable fields that were cleared during this
// mutation.
func (m *ItemFieldMutation) ClearedFields() []string {
	var fields []string
	if m.FieldCleared(itemfield.FieldDescription) {
		fields = append(fields, itemfield.FieldDescription)
	}
	if m.FieldCleared(itemfield.FieldTextValue) {
		fields = append(fields, itemfield.FieldTextValue)
	}
	if m.FieldCleared(itemfield.FieldNumberValue) {
		fields = append(fields, itemfield.FieldNumberValue)
	}
	return fields
}

// FieldCleared returns a boolean indicating if a field with the given name was
// cleared in this mutation.
func (m *ItemFieldMutation) FieldCleared(name string) bool {
	_, ok := m.clearedFields[name]
	return ok
}

// ClearField clears the value of the field with the given name. It returns an
// error if the field is not defined in the schema.
func (m *ItemFieldMutation) ClearField(name string) error {
	switch name {
	case itemfield.FieldDescription:
		m.ClearDescription()
		return nil
	case itemfield.FieldTextValue:
		m.ClearTextValue()
		return nil
	case itemfield.FieldNumberValue:
		m.ClearNumberValue()
		return nil
	}
	return fmt.Errorf("unknown ItemField nullable field %s", name)
}

// ResetField resets all changes in the mutation for the field with the given name.
// It returns an error if the field is not defined in the schema.
func (m *ItemFieldMutation) ResetField(name string) error {
	switch name {
	case itemfield.FieldCreatedAt:
		m.ResetCreatedAt()
		return nil
	case itemfield.FieldUpdatedAt:
		m.ResetUpdatedAt()
		return nil
	case itemfield.FieldName:
		m.ResetName()
		return nil
	case itemfield.FieldDescription:
		m.ResetDescription()
		return nil
	case itemfield.FieldType:
		m.ResetType()
		return nil
	case itemfield.FieldTextValue:
		m.ResetTextValue()
		return nil
	case itemfield.FieldNumberValue:
		m.ResetNumberValue()
		return nil
	case itemfield.FieldBooleanValue:
		m.ResetBooleanValue()
		return nil
	case itemfield.FieldTimeValue:
		m.ResetTimeValue()
		return nil
	}
	return fmt.Errorf("unknown ItemField field %s", name)
}

// AddedEdges returns all edge names that were set/added in this mutation.
func (m *ItemFieldMutation) AddedEdges() []string {
	edges := make([]string, 0, 1)
	if m.item != nil {
		edges = append(edges, itemfield.EdgeItem)
	}
	return edges
}

// AddedIDs returns all IDs (to other nodes) that were added for the given edge
// name in this mutation.
func (m *ItemFieldMutation) AddedIDs(name string) []ent.Value {
	switch name {
	case itemfield.EdgeItem:
		if id := m.item; id != nil {
			return []ent.Value{*id}
		}
	}
	return nil
}

// RemovedEdges returns all edge names that were removed in this mutation.
// ItemField's only edge ("item") is unique, so there are no removable
// (non-unique) edges to report.
func (m *ItemFieldMutation) RemovedEdges() []string {
	edges := make([]string, 0, 1)
	return edges
}

// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with
// the given name in this mutation.
func (m *ItemFieldMutation) RemovedIDs(name string) []ent.Value {
	switch name {
	}
	return nil
}

// ClearedEdges returns all edge names that were cleared in this mutation.
func (m *ItemFieldMutation) ClearedEdges() []string {
	edges := make([]string, 0, 1)
	if m.cleareditem {
		edges = append(edges, itemfield.EdgeItem)
	}
	return edges
}

// EdgeCleared returns a boolean which indicates if the edge with the given name
// was cleared in this mutation.
func (m *ItemFieldMutation) EdgeCleared(name string) bool {
	switch name {
	case itemfield.EdgeItem:
		return m.cleareditem
	}
	return false
}

// ClearEdge clears the value of the edge with the given name. It returns an error
// if that edge is not defined in the schema.
func (m *ItemFieldMutation) ClearEdge(name string) error {
	switch name {
	case itemfield.EdgeItem:
		m.ClearItem()
		return nil
	}
	return fmt.Errorf("unknown ItemField unique edge %s", name)
}

// ResetEdge resets all changes to the edge with the given name in this mutation.
// It returns an error if the edge is not defined in the schema.
func (m *ItemFieldMutation) ResetEdge(name string) error {
	switch name {
	case itemfield.EdgeItem:
		m.ResetItem()
		return nil
	}
	return fmt.Errorf("unknown ItemField edge %s", name)
}

// LabelMutation represents an operation that mutates the Label nodes in the graph.
type LabelMutation struct {
	config
	op            Op
	typ           string
	id            *uuid.UUID
	created_at    *time.Time
	updated_at    *time.Time
	name          *string
	description   *string
	color         *string
	clearedFields map[string]struct{}
	group         *uuid.UUID
	clearedgroup  bool
	items         map[uuid.UUID]struct{}
	removeditems  map[uuid.UUID]struct{}
	cleareditems  bool
	done          bool
	oldValue      func(context.Context) (*Label, error)
	predicates    []predicate.Label
}

var _ ent.Mutation = (*LabelMutation)(nil)

// labelOption allows management of the mutation configuration using functional options.
type labelOption func(*LabelMutation)

// newLabelMutation creates new mutation for the Label entity.
func newLabelMutation(c config, op Op, opts ...labelOption) *LabelMutation {
	m := &LabelMutation{
		config:        c,
		op:            op,
		typ:           TypeLabel,
		clearedFields: make(map[string]struct{}),
	}
	for _, opt := range opts {
		opt(m)
	}
	return m
}

// withLabelID sets the ID field of the mutation.
func withLabelID(id uuid.UUID) labelOption {
	return func(m *LabelMutation) {
		var (
			err   error
			once  sync.Once
			value *Label
		)
		// oldValue lazily fetches the pre-mutation entity exactly once;
		// fetching after the mutation completed (m.done) is an error.
		m.oldValue = func(ctx context.Context) (*Label, error) {
			once.Do(func() {
				if m.done {
					err = errors.New("querying old values post mutation is not allowed")
				} else {
					value, err = m.Client().Label.Get(ctx, id)
				}
			})
			return value, err
		}
		m.id = &id
	}
}

// withLabel sets the old Label of the mutation.
func withLabel(node *Label) labelOption {
	return func(m *LabelMutation) {
		m.oldValue = func(context.Context) (*Label, error) {
			return node, nil
		}
		m.id = &node.ID
	}
}

// Client returns a new `ent.Client` from the mutation. If the mutation was
// executed in a transaction (ent.Tx), a transactional client is returned.
func (m LabelMutation) Client() *Client {
	client := &Client{config: m.config}
	client.init()
	return client
}

// Tx returns an `ent.Tx` for mutations that were executed in transactions;
// it returns an error otherwise.
func (m LabelMutation) Tx() (*Tx, error) {
	if _, ok := m.driver.(*txDriver); !ok {
		return nil, errors.New("ent: mutation is not running in a transaction")
	}
	tx := &Tx{config: m.config}
	tx.init()
	return tx, nil
}

// SetID sets the value of the id field. Note that this
// operation is only accepted on creation of Label entities.
func (m *LabelMutation) SetID(id uuid.UUID) {
	m.id = &id
}

// ID returns the ID value in the mutation. Note that the ID is only available
// if it was provided to the builder or after it was returned from the database.
func (m *LabelMutation) ID() (id uuid.UUID, exists bool) {
	if m.id == nil {
		return
	}
	return *m.id, true
}

// IDs queries the database and returns the entity ids that match the mutation's predicate.
// That means, if the mutation is applied within a transaction with an isolation level such
// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
// or deleted by the mutation.
func (m *LabelMutation) IDs(ctx context.Context) ([]uuid.UUID, error) {
	switch {
	case m.op.Is(OpUpdateOne | OpDeleteOne):
		id, exists := m.ID()
		if exists {
			return []uuid.UUID{id}, nil
		}
		fallthrough
	case m.op.Is(OpUpdate | OpDelete):
		return m.Client().Label.Query().Where(m.predicates...).IDs(ctx)
	default:
		return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
	}
}

// SetCreatedAt sets the "created_at" field.
func (m *LabelMutation) SetCreatedAt(t time.Time) {
	m.created_at = &t
}

// CreatedAt returns the value of the "created_at" field in the mutation.
func (m *LabelMutation) CreatedAt() (r time.Time, exists bool) {
	v := m.created_at
	if v == nil {
		return
	}
	return *v, true
}

// OldCreatedAt returns the old "created_at" field's value of the Label entity.
// If the Label object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LabelMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldCreatedAt requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err)
	}
	return oldValue.CreatedAt, nil
}

// ResetCreatedAt resets all changes to the "created_at" field.
func (m *LabelMutation) ResetCreatedAt() {
	m.created_at = nil
}

// SetUpdatedAt sets the "updated_at" field.
func (m *LabelMutation) SetUpdatedAt(t time.Time) {
	m.updated_at = &t
}

// UpdatedAt returns the value of the "updated_at" field in the mutation.
func (m *LabelMutation) UpdatedAt() (r time.Time, exists bool) {
	v := m.updated_at
	if v == nil {
		return
	}
	return *v, true
}

// OldUpdatedAt returns the old "updated_at" field's value of the Label entity.
// If the Label object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LabelMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldUpdatedAt requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err)
	}
	return oldValue.UpdatedAt, nil
}

// ResetUpdatedAt resets all changes to the "updated_at" field.
func (m *LabelMutation) ResetUpdatedAt() {
	m.updated_at = nil
}

// SetName sets the "name" field.
func (m *LabelMutation) SetName(s string) {
	m.name = &s
}

// Name returns the value of the "name" field in the mutation.
func (m *LabelMutation) Name() (r string, exists bool) {
	v := m.name
	if v == nil {
		return
	}
	return *v, true
}

// OldName returns the old "name" field's value of the Label entity.
// If the Label object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LabelMutation) OldName(ctx context.Context) (v string, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldName is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldName requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldName: %w", err)
	}
	return oldValue.Name, nil
}

// ResetName resets all changes to the "name" field.
func (m *LabelMutation) ResetName() {
	m.name = nil
}

// SetDescription sets the "description" field.
func (m *LabelMutation) SetDescription(s string) {
	m.description = &s
}

// Description returns the value of the "description" field in the mutation.
func (m *LabelMutation) Description() (r string, exists bool) {
	v := m.description
	if v == nil {
		return
	}
	return *v, true
}

// OldDescription returns the old "description" field's value of the Label entity.
// If the Label object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LabelMutation) OldDescription(ctx context.Context) (v string, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldDescription is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldDescription requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldDescription: %w", err)
	}
	return oldValue.Description, nil
}

// ClearDescription clears the value of the "description" field.
func (m *LabelMutation) ClearDescription() {
	m.description = nil
	m.clearedFields[label.FieldDescription] = struct{}{}
}

// DescriptionCleared returns if the "description" field was cleared in this mutation.
func (m *LabelMutation) DescriptionCleared() bool {
	_, ok := m.clearedFields[label.FieldDescription]
	return ok
}

// ResetDescription resets all changes to the "description" field.
func (m *LabelMutation) ResetDescription() {
	m.description = nil
	delete(m.clearedFields, label.FieldDescription)
}

// SetColor sets the "color" field.
func (m *LabelMutation) SetColor(s string) {
	m.color = &s
}

// Color returns the value of the "color" field in the mutation.
func (m *LabelMutation) Color() (r string, exists bool) {
	v := m.color
	if v == nil {
		return
	}
	return *v, true
}

// OldColor returns the old "color" field's value of the Label entity.
// If the Label object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LabelMutation) OldColor(ctx context.Context) (v string, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldColor is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldColor requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldColor: %w", err)
	}
	return oldValue.Color, nil
}

// ClearColor clears the value of the "color" field.
func (m *LabelMutation) ClearColor() {
	m.color = nil
	m.clearedFields[label.FieldColor] = struct{}{}
}

// ColorCleared returns if the "color" field was cleared in this mutation.
func (m *LabelMutation) ColorCleared() bool {
	_, ok := m.clearedFields[label.FieldColor]
	return ok
}

// ResetColor resets all changes to the "color" field.
func (m *LabelMutation) ResetColor() {
	m.color = nil
	delete(m.clearedFields, label.FieldColor)
}

// SetGroupID sets the "group" edge to the Group entity by id.
func (m *LabelMutation) SetGroupID(id uuid.UUID) {
	m.group = &id
}

// ClearGroup clears the "group" edge to the Group entity.
func (m *LabelMutation) ClearGroup() {
	m.clearedgroup = true
}

// GroupCleared reports if the "group" edge to the Group entity was cleared.
func (m *LabelMutation) GroupCleared() bool {
	return m.clearedgroup
}

// GroupID returns the "group" edge ID in the mutation.
func (m *LabelMutation) GroupID() (id uuid.UUID, exists bool) {
	if m.group != nil {
		return *m.group, true
	}
	return
}

// GroupIDs returns the "group" edge IDs in the mutation.
// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use
// GroupID instead. It exists only for internal usage by the builders.
func (m *LabelMutation) GroupIDs() (ids []uuid.UUID) {
	if id := m.group; id != nil {
		ids = append(ids, *id)
	}
	return
}

// ResetGroup resets all changes to the "group" edge.
func (m *LabelMutation) ResetGroup() {
	m.group = nil
	m.clearedgroup = false
}

// AddItemIDs adds the "items" edge to the Item entity by ids.
func (m *LabelMutation) AddItemIDs(ids ...uuid.UUID) {
	if m.items == nil {
		m.items = make(map[uuid.UUID]struct{})
	}
	for i := range ids {
		m.items[ids[i]] = struct{}{}
	}
}

// ClearItems clears the "items" edge to the Item entity.
func (m *LabelMutation) ClearItems() {
	m.cleareditems = true
}

// ItemsCleared reports if the "items" edge to the Item entity was cleared.
func (m *LabelMutation) ItemsCleared() bool {
	return m.cleareditems
}

// RemoveItemIDs removes the "items" edge to the Item entity by IDs.
func (m *LabelMutation) RemoveItemIDs(ids ...uuid.UUID) {
	if m.removeditems == nil {
		m.removeditems = make(map[uuid.UUID]struct{})
	}
	for i := range ids {
		delete(m.items, ids[i])
		m.removeditems[ids[i]] = struct{}{}
	}
}

// RemovedItemsIDs returns the removed IDs of the "items" edge to the Item entity.
func (m *LabelMutation) RemovedItemsIDs() (ids []uuid.UUID) {
	for id := range m.removeditems {
		ids = append(ids, id)
	}
	return
}

// ItemsIDs returns the "items" edge IDs in the mutation.
func (m *LabelMutation) ItemsIDs() (ids []uuid.UUID) {
	for id := range m.items {
		ids = append(ids, id)
	}
	return
}

// ResetItems resets all changes to the "items" edge.
func (m *LabelMutation) ResetItems() {
	m.items = nil
	m.cleareditems = false
	m.removeditems = nil
}

// Where appends a list predicates to the LabelMutation builder.
func (m *LabelMutation) Where(ps ...predicate.Label) {
	m.predicates = append(m.predicates, ps...)
}

// Op returns the operation name.
func (m *LabelMutation) Op() Op {
	return m.op
}

// Type returns the node type of this mutation (Label).
func (m *LabelMutation) Type() string {
	return m.typ
}

// Fields returns all fields that were changed during this mutation. Note that in
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *LabelMutation) Fields() []string {
	fields := make([]string, 0, 5)
	if m.created_at != nil {
		fields = append(fields, label.FieldCreatedAt)
	}
	if m.updated_at != nil {
		fields = append(fields, label.FieldUpdatedAt)
	}
	if m.name != nil {
		fields = append(fields, label.FieldName)
	}
	if m.description != nil {
		fields = append(fields, label.FieldDescription)
	}
	if m.color != nil {
		fields = append(fields, label.FieldColor)
	}
	return fields
}

// Field returns the value of a field with the given name. The second boolean
// return value indicates that this field was not set, or was not defined in the
// schema.
func (m *LabelMutation) Field(name string) (ent.Value, bool) {
	switch name {
	case label.FieldCreatedAt:
		return m.CreatedAt()
	case label.FieldUpdatedAt:
		return m.UpdatedAt()
	case label.FieldName:
		return m.Name()
	case label.FieldDescription:
		return m.Description()
	case label.FieldColor:
		return m.Color()
	}
	return nil, false
}

// OldField returns the old value of the field from the database. An error is
// returned if the mutation operation is not UpdateOne, or the query to the
// database failed.
func (m *LabelMutation) OldField(ctx context.Context, name string) (ent.Value, error) {
	switch name {
	case label.FieldCreatedAt:
		return m.OldCreatedAt(ctx)
	case label.FieldUpdatedAt:
		return m.OldUpdatedAt(ctx)
	case label.FieldName:
		return m.OldName(ctx)
	case label.FieldDescription:
		return m.OldDescription(ctx)
	case label.FieldColor:
		return m.OldColor(ctx)
	}
	return nil, fmt.Errorf("unknown Label field %s", name)
}

// SetField sets the value of a field with the given name. It returns an error if
// the field is not defined in the schema, or if the type mismatched the field
// type.
func (m *LabelMutation) SetField(name string, value ent.Value) error {
	switch name {
	case label.FieldCreatedAt:
		v, ok := value.(time.Time)
		if !ok {
			return fmt.Errorf("unexpected type %T for field %s", value, name)
		}
		m.SetCreatedAt(v)
		return nil
	case label.FieldUpdatedAt:
		v, ok := value.(time.Time)
		if !ok {
			return fmt.Errorf("unexpected type %T for field %s", value, name)
		}
		m.SetUpdatedAt(v)
		return nil
	case label.FieldName:
		v, ok := value.(string)
		if !ok {
			return fmt.Errorf("unexpected type %T for field %s", value, name)
		}
		m.SetName(v)
		return nil
	case label.FieldDescription:
		v, ok := value.(string)
		if !ok {
			return fmt.Errorf("unexpected type %T for field %s", value, name)
		}
		m.SetDescription(v)
		return nil
	case label.FieldColor:
		v, ok := value.(string)
		if !ok {
			return fmt.Errorf("unexpected type %T for field %s", value, name)
		}
		m.SetColor(v)
		return nil
	}
	return fmt.Errorf("unknown Label field %s", name)
}

// AddedFields returns all numeric fields that were incremented/decremented during
// this mutation. Label has no numeric fields.
func (m *LabelMutation) AddedFields() []string {
	return nil
}

// AddedField returns the numeric value that was incremented/decremented on a field
// with the given name. The second boolean return value indicates that this field
// was not set, or was not defined in the schema.
func (m *LabelMutation) AddedField(name string) (ent.Value, bool) {
	return nil, false
}

// AddField adds the value to the field with the given name. It returns an error if
// the field is not defined in the schema, or if the type mismatched the field
// type.
func (m *LabelMutation) AddField(name string, value ent.Value) error {
	switch name {
	}
	return fmt.Errorf("unknown Label numeric field %s", name)
}

// ClearedFields returns all nullable fields that were cleared during this
// mutation.
func (m *LabelMutation) ClearedFields() []string {
	var fields []string
	if m.FieldCleared(label.FieldDescription) {
		fields = append(fields, label.FieldDescription)
	}
	if m.FieldCleared(label.FieldColor) {
		fields = append(fields, label.FieldColor)
	}
	return fields
}

// FieldCleared returns a boolean indicating if a field with the given name was
// cleared in this mutation.
func (m *LabelMutation) FieldCleared(name string) bool {
	_, ok := m.clearedFields[name]
	return ok
}

// ClearField clears the value of the field with the given name. It returns an
// error if the field is not defined in the schema.
func (m *LabelMutation) ClearField(name string) error {
	switch name {
	case label.FieldDescription:
		m.ClearDescription()
		return nil
	case label.FieldColor:
		m.ClearColor()
		return nil
	}
	return fmt.Errorf("unknown Label nullable field %s", name)
}

// ResetField resets all changes in the mutation for the field with the given name.
// It returns an error if the field is not defined in the schema.
func (m *LabelMutation) ResetField(name string) error {
	switch name {
	case label.FieldCreatedAt:
		m.ResetCreatedAt()
		return nil
	case label.FieldUpdatedAt:
		m.ResetUpdatedAt()
		return nil
	case label.FieldName:
		m.ResetName()
		return nil
	case label.FieldDescription:
		m.ResetDescription()
		return nil
	case label.FieldColor:
		m.ResetColor()
		return nil
	}
	return fmt.Errorf("unknown Label field %s", name)
}

// AddedEdges returns all edge names that were set/added in this mutation.
func (m *LabelMutation) AddedEdges() []string {
	edges := make([]string, 0, 2)
	if m.group != nil {
		edges = append(edges, label.EdgeGroup)
	}
	if m.items != nil {
		edges = append(edges, label.EdgeItems)
	}
	return edges
}

// AddedIDs returns all IDs (to other nodes) that were added for the given edge
// name in this mutation.
func (m *LabelMutation) AddedIDs(name string) []ent.Value {
	switch name {
	case label.EdgeGroup:
		if id := m.group; id != nil {
			return []ent.Value{*id}
		}
	case label.EdgeItems:
		ids := make([]ent.Value, 0, len(m.items))
		for id := range m.items {
			ids = append(ids, id)
		}
		return ids
	}
	return nil
}

// RemovedEdges returns all edge names that were removed in this mutation.
func (m *LabelMutation) RemovedEdges() []string {
	edges := make([]string, 0, 2)
	if m.removeditems != nil {
		edges = append(edges, label.EdgeItems)
	}
	return edges
}

// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with
// the given name in this mutation.
func (m *LabelMutation) RemovedIDs(name string) []ent.Value {
	switch name {
	case label.EdgeItems:
		ids := make([]ent.Value, 0, len(m.removeditems))
		for id := range m.removeditems {
			ids = append(ids, id)
		}
		return ids
	}
	return nil
}

// ClearedEdges returns all edge names that were cleared in this mutation.
func (m *LabelMutation) ClearedEdges() []string {
	edges := make([]string, 0, 2)
	if m.clearedgroup {
		edges = append(edges, label.EdgeGroup)
	}
	if m.cleareditems {
		edges = append(edges, label.EdgeItems)
	}
	return edges
}

// EdgeCleared returns a boolean which indicates if the edge with the given name
// was cleared in this mutation.
func (m *LabelMutation) EdgeCleared(name string) bool {
	switch name {
	case label.EdgeGroup:
		return m.clearedgroup
	case label.EdgeItems:
		return m.cleareditems
	}
	return false
}

// ClearEdge clears the value of the edge with the given name. It returns an error
// if that edge is not defined in the schema.
func (m *LabelMutation) ClearEdge(name string) error {
	switch name {
	case label.EdgeGroup:
		m.ClearGroup()
		return nil
	}
	return fmt.Errorf("unknown Label unique edge %s", name)
}

// ResetEdge resets all changes to the edge with the given name in this mutation.
// It returns an error if the edge is not defined in the schema.
func (m *LabelMutation) ResetEdge(name string) error {
	switch name {
	case label.EdgeGroup:
		m.ResetGroup()
		return nil
	case label.EdgeItems:
		m.ResetItems()
		return nil
	}
	return fmt.Errorf("unknown Label edge %s", name)
}

// LocationMutation represents an operation that mutates the Location nodes in the graph.
type LocationMutation struct {
	config
	op            Op
	typ           string
	id            *uuid.UUID
	created_at    *time.Time
	updated_at    *time.Time
	name          *string
	description   *string
	clearedFields map[string]struct{}
	group         *uuid.UUID
	clearedgroup  bool
	items         map[uuid.UUID]struct{}
	removeditems  map[uuid.UUID]struct{}
	cleareditems  bool
	done          bool
	oldValue      func(context.Context) (*Location, error)
	predicates    []predicate.Location
}

var _ ent.Mutation = (*LocationMutation)(nil)

// locationOption allows management of the mutation configuration using functional options.
type locationOption func(*LocationMutation)

// newLocationMutation creates new mutation for the Location entity.
func newLocationMutation(c config, op Op, opts ...locationOption) *LocationMutation {
	m := &LocationMutation{
		config:        c,
		op:            op,
		typ:           TypeLocation,
		clearedFields: make(map[string]struct{}),
	}
	for _, opt := range opts {
		opt(m)
	}
	return m
}

// withLocationID sets the ID field of the mutation.
func withLocationID(id uuid.UUID) locationOption {
	return func(m *LocationMutation) {
		var (
			err   error
			once  sync.Once
			value *Location
		)
		// oldValue lazily fetches the pre-mutation entity exactly once;
		// fetching after the mutation completed (m.done) is an error.
		m.oldValue = func(ctx context.Context) (*Location, error) {
			once.Do(func() {
				if m.done {
					err = errors.New("querying old values post mutation is not allowed")
				} else {
					value, err = m.Client().Location.Get(ctx, id)
				}
			})
			return value, err
		}
		m.id = &id
	}
}

// withLocation sets the old Location of the mutation.
func withLocation(node *Location) locationOption {
	return func(m *LocationMutation) {
		m.oldValue = func(context.Context) (*Location, error) {
			return node, nil
		}
		m.id = &node.ID
	}
}

// Client returns a new `ent.Client` from the mutation. If the mutation was
// executed in a transaction (ent.Tx), a transactional client is returned.
func (m LocationMutation) Client() *Client {
	client := &Client{config: m.config}
	client.init()
	return client
}

// Tx returns an `ent.Tx` for mutations that were executed in transactions;
// it returns an error otherwise.
func (m LocationMutation) Tx() (*Tx, error) {
	if _, ok := m.driver.(*txDriver); !ok {
		return nil, errors.New("ent: mutation is not running in a transaction")
	}
	tx := &Tx{config: m.config}
	tx.init()
	return tx, nil
}

// SetID sets the value of the id field. Note that this
// operation is only accepted on creation of Location entities.
func (m *LocationMutation) SetID(id uuid.UUID) {
	m.id = &id
}

// ID returns the ID value in the mutation. Note that the ID is only available
// if it was provided to the builder or after it was returned from the database.
func (m *LocationMutation) ID() (id uuid.UUID, exists bool) {
	if m.id == nil {
		return
	}
	return *m.id, true
}

// IDs queries the database and returns the entity ids that match the mutation's predicate.
// That means, if the mutation is applied within a transaction with an isolation level such
// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
// or deleted by the mutation.
func (m *LocationMutation) IDs(ctx context.Context) ([]uuid.UUID, error) {
	switch {
	case m.op.Is(OpUpdateOne | OpDeleteOne):
		id, exists := m.ID()
		if exists {
			return []uuid.UUID{id}, nil
		}
		fallthrough
	case m.op.Is(OpUpdate | OpDelete):
		return m.Client().Location.Query().Where(m.predicates...).IDs(ctx)
	default:
		return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
	}
}

// SetCreatedAt sets the "created_at" field.
func (m *LocationMutation) SetCreatedAt(t time.Time) {
	m.created_at = &t
}

// CreatedAt returns the value of the "created_at" field in the mutation.
func (m *LocationMutation) CreatedAt() (r time.Time, exists bool) {
	v := m.created_at
	if v == nil {
		return
	}
	return *v, true
}

// OldCreatedAt returns the old "created_at" field's value of the Location entity.
// If the Location object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LocationMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldCreatedAt requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err)
	}
	return oldValue.CreatedAt, nil
}

// ResetCreatedAt resets all changes to the "created_at" field.
func (m *LocationMutation) ResetCreatedAt() {
	m.created_at = nil
}

// SetUpdatedAt sets the "updated_at" field.
func (m *LocationMutation) SetUpdatedAt(t time.Time) {
	m.updated_at = &t
}

// UpdatedAt returns the value of the "updated_at" field in the mutation.
func (m *LocationMutation) UpdatedAt() (r time.Time, exists bool) {
	v := m.updated_at
	if v == nil {
		return
	}
	return *v, true
}

// OldUpdatedAt returns the old "updated_at" field's value of the Location entity.
// If the Location object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LocationMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldUpdatedAt requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err)
	}
	return oldValue.UpdatedAt, nil
}

// ResetUpdatedAt resets all changes to the "updated_at" field.
func (m *LocationMutation) ResetUpdatedAt() {
	m.updated_at = nil
}

// SetName sets the "name" field.
func (m *LocationMutation) SetName(s string) {
	m.name = &s
}

// Name returns the value of the "name" field in the mutation.
func (m *LocationMutation) Name() (r string, exists bool) {
	v := m.name
	if v == nil {
		return
	}
	return *v, true
}

// OldName returns the old "name" field's value of the Location entity.
// If the Location object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LocationMutation) OldName(ctx context.Context) (v string, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldName is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldName requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldName: %w", err)
	}
	return oldValue.Name, nil
}

// ResetName resets all changes to the "name" field.
func (m *LocationMutation) ResetName() {
	m.name = nil
}

// SetDescription sets the "description" field.
func (m *LocationMutation) SetDescription(s string) {
	m.description = &s
}

// Description returns the value of the "description" field in the mutation.
func (m *LocationMutation) Description() (r string, exists bool) {
	v := m.description
	if v == nil {
		return
	}
	return *v, true
}

// OldDescription returns the old "description" field's value of the Location entity.
// If the Location object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *LocationMutation) OldDescription(ctx context.Context) (v string, err error) {
	if !m.op.Is(OpUpdateOne) {
		return v, errors.New("OldDescription is only allowed on UpdateOne operations")
	}
	if m.id == nil || m.oldValue == nil {
		return v, errors.New("OldDescription requires an ID field in the mutation")
	}
	oldValue, err := m.oldValue(ctx)
	if err != nil {
		return v, fmt.Errorf("querying old value for OldDescription: %w", err)
	}
	return oldValue.Description, nil
}

// ClearDescription clears the value of the "description" field.
func (m *LocationMutation) ClearDescription() {
	m.description = nil
	m.clearedFields[location.FieldDescription] = struct{}{}
}

// DescriptionCleared returns if the "description" field was cleared in this mutation.
func (m *LocationMutation) DescriptionCleared() bool {
	_, ok := m.clearedFields[location.FieldDescription]
	return ok
}

// ResetDescription resets all changes to the "description" field.
func (m *LocationMutation) ResetDescription() {
	m.description = nil
	delete(m.clearedFields, location.FieldDescription)
}

// SetGroupID sets the "group" edge to the Group entity by id.
func (m *LocationMutation) SetGroupID(id uuid.UUID) {
	m.group = &id
}

// ClearGroup clears the "group" edge to the Group entity.
func (m *LocationMutation) ClearGroup() {
	m.clearedgroup = true
}

// GroupCleared reports if the "group" edge to the Group entity was cleared.
func (m *LocationMutation) GroupCleared() bool {
	return m.clearedgroup
}

// GroupID returns the "group" edge ID in the mutation.
func (m *LocationMutation) GroupID() (id uuid.UUID, exists bool) {
	if m.group != nil {
		return *m.group, true
	}
	return
}

// GroupIDs returns the "group" edge IDs in the mutation.
// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use
// GroupID instead. It exists only for internal usage by the builders.
func (m *LocationMutation) GroupIDs() (ids []uuid.UUID) {
	if id := m.group; id != nil {
		ids = append(ids, *id)
	}
	return
}

// ResetGroup resets all changes to the "group" edge.
func (m *LocationMutation) ResetGroup() {
	m.group = nil
	m.clearedgroup = false
}

// AddItemIDs adds the "items" edge to the Item entity by ids.
func (m *LocationMutation) AddItemIDs(ids ...uuid.UUID) {
	if m.items == nil {
		m.items = make(map[uuid.UUID]struct{})
	}
	for i := range ids {
		m.items[ids[i]] = struct{}{}
	}
}

// ClearItems clears the "items" edge to the Item entity.
func (m *LocationMutation) ClearItems() {
	m.cleareditems = true
}

// ItemsCleared reports if the "items" edge to the Item entity was cleared.
func (m *LocationMutation) ItemsCleared() bool {
	return m.cleareditems
}

// RemoveItemIDs removes the "items" edge to the Item entity by IDs.
func (m *LocationMutation) RemoveItemIDs(ids ...uuid.UUID) {
	if m.removeditems == nil {
		m.removeditems = make(map[uuid.UUID]struct{})
	}
	for i := range ids {
		delete(m.items, ids[i])
		m.removeditems[ids[i]] = struct{}{}
	}
}

// RemovedItemsIDs returns the removed IDs of the "items" edge to the Item entity.
func (m *LocationMutation) RemovedItemsIDs() (ids []uuid.UUID) {
	for id := range m.removeditems {
		ids = append(ids, id)
	}
	return
}

// ItemsIDs returns the "items" edge IDs in the mutation.
func (m *LocationMutation) ItemsIDs() (ids []uuid.UUID) {
	for id := range m.items {
		ids = append(ids, id)
	}
	return
}

// ResetItems resets all changes to the "items" edge.
func (m *LocationMutation) ResetItems() {
	m.items = nil
	m.cleareditems = false
	m.removeditems = nil
}

// Where appends a list predicates to the LocationMutation builder.
func (m *LocationMutation) Where(ps ...predicate.Location) {
	m.predicates = append(m.predicates, ps...)
}

// Op returns the operation name.
func (m *LocationMutation) Op() Op {
	return m.op
}

// Type returns the node type of this mutation (Location).
func (m *LocationMutation) Type() string {
	return m.typ
}

// Fields returns all fields that were changed during this mutation. Note that in
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *LocationMutation) Fields() []string {
	fields := make([]string, 0, 4)
	if m.created_at != nil {
		fields = append(fields, location.FieldCreatedAt)
	}
	if m.updated_at != nil {
		fields = append(fields, location.FieldUpdatedAt)
	}
	if m.name != nil {
		fields = append(fields, location.FieldName)
	}
	if m.description != nil {
		fields = append(fields, location.FieldDescription)
	}
	return fields
}

// Field returns the value of a field with the given name. The second boolean
// return value indicates that this field was not set, or was not defined in the
// schema.
func (m *LocationMutation) Field(name string) (ent.Value, bool) {
	switch name {
	case location.FieldCreatedAt:
		return m.CreatedAt()
	case location.FieldUpdatedAt:
		return m.UpdatedAt()
	case location.FieldName:
		return m.Name()
	case location.FieldDescription:
		return m.Description()
	}
	return nil, false
}

// OldField returns the old value of the field from the database. An error is
// returned if the mutation operation is not UpdateOne, or the query to the
// database failed.
func (m *LocationMutation) OldField(ctx context.Context, name string) (ent.Value, error) {
	switch name {
	case location.FieldCreatedAt:
		return m.OldCreatedAt(ctx)
	case location.FieldUpdatedAt:
		return m.OldUpdatedAt(ctx)
	case location.FieldName:
		return m.OldName(ctx)
	case location.FieldDescription:
		return m.OldDescription(ctx)
	}
	return nil, fmt.Errorf("unknown Location field %s", name)
}

// SetField sets the value of a field with the given name. It returns an error if
// the field is not defined in the schema, or if the type mismatched the field
// type.
+func (m *LocationMutation) SetField(name string, value ent.Value) error { + switch name { + case location.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case location.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil + case location.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case location.FieldDescription: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetDescription(v) + return nil + } + return fmt.Errorf("unknown Location field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *LocationMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *LocationMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *LocationMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown Location numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. 
+func (m *LocationMutation) ClearedFields() []string { + var fields []string + if m.FieldCleared(location.FieldDescription) { + fields = append(fields, location.FieldDescription) + } + return fields +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *LocationMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *LocationMutation) ClearField(name string) error { + switch name { + case location.FieldDescription: + m.ClearDescription() + return nil + } + return fmt.Errorf("unknown Location nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *LocationMutation) ResetField(name string) error { + switch name { + case location.FieldCreatedAt: + m.ResetCreatedAt() + return nil + case location.FieldUpdatedAt: + m.ResetUpdatedAt() + return nil + case location.FieldName: + m.ResetName() + return nil + case location.FieldDescription: + m.ResetDescription() + return nil + } + return fmt.Errorf("unknown Location field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *LocationMutation) AddedEdges() []string { + edges := make([]string, 0, 2) + if m.group != nil { + edges = append(edges, location.EdgeGroup) + } + if m.items != nil { + edges = append(edges, location.EdgeItems) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. 
+func (m *LocationMutation) AddedIDs(name string) []ent.Value { + switch name { + case location.EdgeGroup: + if id := m.group; id != nil { + return []ent.Value{*id} + } + case location.EdgeItems: + ids := make([]ent.Value, 0, len(m.items)) + for id := range m.items { + ids = append(ids, id) + } + return ids + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *LocationMutation) RemovedEdges() []string { + edges := make([]string, 0, 2) + if m.removeditems != nil { + edges = append(edges, location.EdgeItems) + } + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *LocationMutation) RemovedIDs(name string) []ent.Value { + switch name { + case location.EdgeItems: + ids := make([]ent.Value, 0, len(m.removeditems)) + for id := range m.removeditems { + ids = append(ids, id) + } + return ids + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *LocationMutation) ClearedEdges() []string { + edges := make([]string, 0, 2) + if m.clearedgroup { + edges = append(edges, location.EdgeGroup) + } + if m.cleareditems { + edges = append(edges, location.EdgeItems) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *LocationMutation) EdgeCleared(name string) bool { + switch name { + case location.EdgeGroup: + return m.clearedgroup + case location.EdgeItems: + return m.cleareditems + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. 
+func (m *LocationMutation) ClearEdge(name string) error { + switch name { + case location.EdgeGroup: + m.ClearGroup() + return nil + } + return fmt.Errorf("unknown Location unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *LocationMutation) ResetEdge(name string) error { + switch name { + case location.EdgeGroup: + m.ResetGroup() + return nil + case location.EdgeItems: + m.ResetItems() + return nil + } + return fmt.Errorf("unknown Location edge %s", name) +} + // UserMutation represents an operation that mutates the User nodes in the graph. type UserMutation struct { config op Op typ string id *uuid.UUID + created_at *time.Time + updated_at *time.Time name *string email *string password *string is_superuser *bool clearedFields map[string]struct{} - auth_tokens map[int]struct{} - removedauth_tokens map[int]struct{} + group *uuid.UUID + clearedgroup bool + auth_tokens map[uuid.UUID]struct{} + removedauth_tokens map[uuid.UUID]struct{} clearedauth_tokens bool done bool oldValue func(context.Context) (*User, error) @@ -641,6 +5605,78 @@ func (m *UserMutation) IDs(ctx context.Context) ([]uuid.UUID, error) { } } +// SetCreatedAt sets the "created_at" field. +func (m *UserMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *UserMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *UserMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *UserMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUpdatedAt sets the "updated_at" field. +func (m *UserMutation) SetUpdatedAt(t time.Time) { + m.updated_at = &t +} + +// UpdatedAt returns the value of the "updated_at" field in the mutation. +func (m *UserMutation) UpdatedAt() (r time.Time, exists bool) { + v := m.updated_at + if v == nil { + return + } + return *v, true +} + +// OldUpdatedAt returns the old "updated_at" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUpdatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) + } + return oldValue.UpdatedAt, nil +} + +// ResetUpdatedAt resets all changes to the "updated_at" field. +func (m *UserMutation) ResetUpdatedAt() { + m.updated_at = nil +} + // SetName sets the "name" field. 
func (m *UserMutation) SetName(s string) { m.name = &s @@ -785,10 +5821,49 @@ func (m *UserMutation) ResetIsSuperuser() { m.is_superuser = nil } +// SetGroupID sets the "group" edge to the Group entity by id. +func (m *UserMutation) SetGroupID(id uuid.UUID) { + m.group = &id +} + +// ClearGroup clears the "group" edge to the Group entity. +func (m *UserMutation) ClearGroup() { + m.clearedgroup = true +} + +// GroupCleared reports if the "group" edge to the Group entity was cleared. +func (m *UserMutation) GroupCleared() bool { + return m.clearedgroup +} + +// GroupID returns the "group" edge ID in the mutation. +func (m *UserMutation) GroupID() (id uuid.UUID, exists bool) { + if m.group != nil { + return *m.group, true + } + return +} + +// GroupIDs returns the "group" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// GroupID instead. It exists only for internal usage by the builders. +func (m *UserMutation) GroupIDs() (ids []uuid.UUID) { + if id := m.group; id != nil { + ids = append(ids, *id) + } + return +} + +// ResetGroup resets all changes to the "group" edge. +func (m *UserMutation) ResetGroup() { + m.group = nil + m.clearedgroup = false +} + // AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by ids. -func (m *UserMutation) AddAuthTokenIDs(ids ...int) { +func (m *UserMutation) AddAuthTokenIDs(ids ...uuid.UUID) { if m.auth_tokens == nil { - m.auth_tokens = make(map[int]struct{}) + m.auth_tokens = make(map[uuid.UUID]struct{}) } for i := range ids { m.auth_tokens[ids[i]] = struct{}{} @@ -806,9 +5881,9 @@ func (m *UserMutation) AuthTokensCleared() bool { } // RemoveAuthTokenIDs removes the "auth_tokens" edge to the AuthTokens entity by IDs. 
-func (m *UserMutation) RemoveAuthTokenIDs(ids ...int) { +func (m *UserMutation) RemoveAuthTokenIDs(ids ...uuid.UUID) { if m.removedauth_tokens == nil { - m.removedauth_tokens = make(map[int]struct{}) + m.removedauth_tokens = make(map[uuid.UUID]struct{}) } for i := range ids { delete(m.auth_tokens, ids[i]) @@ -817,7 +5892,7 @@ func (m *UserMutation) RemoveAuthTokenIDs(ids ...int) { } // RemovedAuthTokens returns the removed IDs of the "auth_tokens" edge to the AuthTokens entity. -func (m *UserMutation) RemovedAuthTokensIDs() (ids []int) { +func (m *UserMutation) RemovedAuthTokensIDs() (ids []uuid.UUID) { for id := range m.removedauth_tokens { ids = append(ids, id) } @@ -825,7 +5900,7 @@ func (m *UserMutation) RemovedAuthTokensIDs() (ids []int) { } // AuthTokensIDs returns the "auth_tokens" edge IDs in the mutation. -func (m *UserMutation) AuthTokensIDs() (ids []int) { +func (m *UserMutation) AuthTokensIDs() (ids []uuid.UUID) { for id := range m.auth_tokens { ids = append(ids, id) } @@ -858,7 +5933,13 @@ func (m *UserMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *UserMutation) Fields() []string { - fields := make([]string, 0, 4) + fields := make([]string, 0, 6) + if m.created_at != nil { + fields = append(fields, user.FieldCreatedAt) + } + if m.updated_at != nil { + fields = append(fields, user.FieldUpdatedAt) + } if m.name != nil { fields = append(fields, user.FieldName) } @@ -879,6 +5960,10 @@ func (m *UserMutation) Fields() []string { // schema. func (m *UserMutation) Field(name string) (ent.Value, bool) { switch name { + case user.FieldCreatedAt: + return m.CreatedAt() + case user.FieldUpdatedAt: + return m.UpdatedAt() case user.FieldName: return m.Name() case user.FieldEmail: @@ -896,6 +5981,10 @@ func (m *UserMutation) Field(name string) (ent.Value, bool) { // database failed. 
func (m *UserMutation) OldField(ctx context.Context, name string) (ent.Value, error) { switch name { + case user.FieldCreatedAt: + return m.OldCreatedAt(ctx) + case user.FieldUpdatedAt: + return m.OldUpdatedAt(ctx) case user.FieldName: return m.OldName(ctx) case user.FieldEmail: @@ -913,6 +6002,20 @@ func (m *UserMutation) OldField(ctx context.Context, name string) (ent.Value, er // type. func (m *UserMutation) SetField(name string, value ent.Value) error { switch name { + case user.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case user.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil case user.FieldName: v, ok := value.(string) if !ok { @@ -990,6 +6093,12 @@ func (m *UserMutation) ClearField(name string) error { // It returns an error if the field is not defined in the schema. func (m *UserMutation) ResetField(name string) error { switch name { + case user.FieldCreatedAt: + m.ResetCreatedAt() + return nil + case user.FieldUpdatedAt: + m.ResetUpdatedAt() + return nil case user.FieldName: m.ResetName() return nil @@ -1008,7 +6117,10 @@ func (m *UserMutation) ResetField(name string) error { // AddedEdges returns all edge names that were set/added in this mutation. func (m *UserMutation) AddedEdges() []string { - edges := make([]string, 0, 1) + edges := make([]string, 0, 2) + if m.group != nil { + edges = append(edges, user.EdgeGroup) + } if m.auth_tokens != nil { edges = append(edges, user.EdgeAuthTokens) } @@ -1019,6 +6131,10 @@ func (m *UserMutation) AddedEdges() []string { // name in this mutation. 
func (m *UserMutation) AddedIDs(name string) []ent.Value { switch name { + case user.EdgeGroup: + if id := m.group; id != nil { + return []ent.Value{*id} + } case user.EdgeAuthTokens: ids := make([]ent.Value, 0, len(m.auth_tokens)) for id := range m.auth_tokens { @@ -1031,7 +6147,7 @@ func (m *UserMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *UserMutation) RemovedEdges() []string { - edges := make([]string, 0, 1) + edges := make([]string, 0, 2) if m.removedauth_tokens != nil { edges = append(edges, user.EdgeAuthTokens) } @@ -1054,7 +6170,10 @@ func (m *UserMutation) RemovedIDs(name string) []ent.Value { // ClearedEdges returns all edge names that were cleared in this mutation. func (m *UserMutation) ClearedEdges() []string { - edges := make([]string, 0, 1) + edges := make([]string, 0, 2) + if m.clearedgroup { + edges = append(edges, user.EdgeGroup) + } if m.clearedauth_tokens { edges = append(edges, user.EdgeAuthTokens) } @@ -1065,6 +6184,8 @@ func (m *UserMutation) ClearedEdges() []string { // was cleared in this mutation. func (m *UserMutation) EdgeCleared(name string) bool { switch name { + case user.EdgeGroup: + return m.clearedgroup case user.EdgeAuthTokens: return m.clearedauth_tokens } @@ -1075,6 +6196,9 @@ func (m *UserMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *UserMutation) ClearEdge(name string) error { switch name { + case user.EdgeGroup: + m.ClearGroup() + return nil } return fmt.Errorf("unknown User unique edge %s", name) } @@ -1083,6 +6207,9 @@ func (m *UserMutation) ClearEdge(name string) error { // It returns an error if the edge is not defined in the schema. 
func (m *UserMutation) ResetEdge(name string) error { switch name { + case user.EdgeGroup: + m.ResetGroup() + return nil case user.EdgeAuthTokens: m.ResetAuthTokens() return nil diff --git a/backend/ent/predicate/predicate.go b/backend/ent/predicate/predicate.go index b26324f..6053082 100644 --- a/backend/ent/predicate/predicate.go +++ b/backend/ent/predicate/predicate.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package predicate @@ -9,5 +9,20 @@ import ( // AuthTokens is the predicate function for authtokens builders. type AuthTokens func(*sql.Selector) +// Group is the predicate function for group builders. +type Group func(*sql.Selector) + +// Item is the predicate function for item builders. +type Item func(*sql.Selector) + +// ItemField is the predicate function for itemfield builders. +type ItemField func(*sql.Selector) + +// Label is the predicate function for label builders. +type Label func(*sql.Selector) + +// Location is the predicate function for location builders. +type Location func(*sql.Selector) + // User is the predicate function for user builders. type User func(*sql.Selector) diff --git a/backend/ent/runtime.go b/backend/ent/runtime.go index 9df6038..8dc33ae 100644 --- a/backend/ent/runtime.go +++ b/backend/ent/runtime.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. 
package ent @@ -7,6 +7,11 @@ import ( "github.com/google/uuid" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/item" + "github.com/hay-kot/content/backend/ent/itemfield" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/ent/location" "github.com/hay-kot/content/backend/ent/schema" "github.com/hay-kot/content/backend/ent/user" ) @@ -15,36 +20,357 @@ import ( // (default values, validators, hooks and policies) and stitches it // to their package variables. func init() { + authtokensMixin := schema.AuthTokens{}.Mixin() + authtokensMixinFields0 := authtokensMixin[0].Fields() + _ = authtokensMixinFields0 authtokensFields := schema.AuthTokens{}.Fields() _ = authtokensFields + // authtokensDescCreatedAt is the schema descriptor for created_at field. + authtokensDescCreatedAt := authtokensMixinFields0[1].Descriptor() + // authtokens.DefaultCreatedAt holds the default value on creation for the created_at field. + authtokens.DefaultCreatedAt = authtokensDescCreatedAt.Default.(func() time.Time) + // authtokensDescUpdatedAt is the schema descriptor for updated_at field. + authtokensDescUpdatedAt := authtokensMixinFields0[2].Descriptor() + // authtokens.DefaultUpdatedAt holds the default value on creation for the updated_at field. + authtokens.DefaultUpdatedAt = authtokensDescUpdatedAt.Default.(func() time.Time) + // authtokens.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + authtokens.UpdateDefaultUpdatedAt = authtokensDescUpdatedAt.UpdateDefault.(func() time.Time) // authtokensDescExpiresAt is the schema descriptor for expires_at field. authtokensDescExpiresAt := authtokensFields[1].Descriptor() // authtokens.DefaultExpiresAt holds the default value on creation for the expires_at field. 
authtokens.DefaultExpiresAt = authtokensDescExpiresAt.Default.(func() time.Time) - // authtokensDescCreatedAt is the schema descriptor for created_at field. - authtokensDescCreatedAt := authtokensFields[2].Descriptor() - // authtokens.DefaultCreatedAt holds the default value on creation for the created_at field. - authtokens.DefaultCreatedAt = authtokensDescCreatedAt.Default.(func() time.Time) + // authtokensDescID is the schema descriptor for id field. + authtokensDescID := authtokensMixinFields0[0].Descriptor() + // authtokens.DefaultID holds the default value on creation for the id field. + authtokens.DefaultID = authtokensDescID.Default.(func() uuid.UUID) + groupMixin := schema.Group{}.Mixin() + groupMixinFields0 := groupMixin[0].Fields() + _ = groupMixinFields0 + groupFields := schema.Group{}.Fields() + _ = groupFields + // groupDescCreatedAt is the schema descriptor for created_at field. + groupDescCreatedAt := groupMixinFields0[1].Descriptor() + // group.DefaultCreatedAt holds the default value on creation for the created_at field. + group.DefaultCreatedAt = groupDescCreatedAt.Default.(func() time.Time) + // groupDescUpdatedAt is the schema descriptor for updated_at field. + groupDescUpdatedAt := groupMixinFields0[2].Descriptor() + // group.DefaultUpdatedAt holds the default value on creation for the updated_at field. + group.DefaultUpdatedAt = groupDescUpdatedAt.Default.(func() time.Time) + // group.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + group.UpdateDefaultUpdatedAt = groupDescUpdatedAt.UpdateDefault.(func() time.Time) + // groupDescName is the schema descriptor for name field. + groupDescName := groupFields[0].Descriptor() + // group.NameValidator is a validator for the "name" field. It is called by the builders before save. 
+ group.NameValidator = func() func(string) error { + validators := groupDescName.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(name string) error { + for _, fn := range fns { + if err := fn(name); err != nil { + return err + } + } + return nil + } + }() + // groupDescID is the schema descriptor for id field. + groupDescID := groupMixinFields0[0].Descriptor() + // group.DefaultID holds the default value on creation for the id field. + group.DefaultID = groupDescID.Default.(func() uuid.UUID) + itemMixin := schema.Item{}.Mixin() + itemMixinFields0 := itemMixin[0].Fields() + _ = itemMixinFields0 + itemMixinFields1 := itemMixin[1].Fields() + _ = itemMixinFields1 + itemFields := schema.Item{}.Fields() + _ = itemFields + // itemDescCreatedAt is the schema descriptor for created_at field. + itemDescCreatedAt := itemMixinFields0[1].Descriptor() + // item.DefaultCreatedAt holds the default value on creation for the created_at field. + item.DefaultCreatedAt = itemDescCreatedAt.Default.(func() time.Time) + // itemDescUpdatedAt is the schema descriptor for updated_at field. + itemDescUpdatedAt := itemMixinFields0[2].Descriptor() + // item.DefaultUpdatedAt holds the default value on creation for the updated_at field. + item.DefaultUpdatedAt = itemDescUpdatedAt.Default.(func() time.Time) + // item.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + item.UpdateDefaultUpdatedAt = itemDescUpdatedAt.UpdateDefault.(func() time.Time) + // itemDescName is the schema descriptor for name field. + itemDescName := itemMixinFields1[0].Descriptor() + // item.NameValidator is a validator for the "name" field. It is called by the builders before save. 
+ item.NameValidator = func() func(string) error { + validators := itemDescName.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(name string) error { + for _, fn := range fns { + if err := fn(name); err != nil { + return err + } + } + return nil + } + }() + // itemDescDescription is the schema descriptor for description field. + itemDescDescription := itemMixinFields1[1].Descriptor() + // item.DescriptionValidator is a validator for the "description" field. It is called by the builders before save. + item.DescriptionValidator = itemDescDescription.Validators[0].(func(string) error) + // itemDescNotes is the schema descriptor for notes field. + itemDescNotes := itemFields[0].Descriptor() + // item.NotesValidator is a validator for the "notes" field. It is called by the builders before save. + item.NotesValidator = itemDescNotes.Validators[0].(func(string) error) + // itemDescSerialNumber is the schema descriptor for serial_number field. + itemDescSerialNumber := itemFields[1].Descriptor() + // item.SerialNumberValidator is a validator for the "serial_number" field. It is called by the builders before save. + item.SerialNumberValidator = itemDescSerialNumber.Validators[0].(func(string) error) + // itemDescModelNumber is the schema descriptor for model_number field. + itemDescModelNumber := itemFields[2].Descriptor() + // item.ModelNumberValidator is a validator for the "model_number" field. It is called by the builders before save. + item.ModelNumberValidator = itemDescModelNumber.Validators[0].(func(string) error) + // itemDescManufacturer is the schema descriptor for manufacturer field. + itemDescManufacturer := itemFields[3].Descriptor() + // item.ManufacturerValidator is a validator for the "manufacturer" field. It is called by the builders before save. 
+ item.ManufacturerValidator = itemDescManufacturer.Validators[0].(func(string) error) + // itemDescPurchasePrice is the schema descriptor for purchase_price field. + itemDescPurchasePrice := itemFields[6].Descriptor() + // item.DefaultPurchasePrice holds the default value on creation for the purchase_price field. + item.DefaultPurchasePrice = itemDescPurchasePrice.Default.(float64) + // itemDescSoldPrice is the schema descriptor for sold_price field. + itemDescSoldPrice := itemFields[10].Descriptor() + // item.DefaultSoldPrice holds the default value on creation for the sold_price field. + item.DefaultSoldPrice = itemDescSoldPrice.Default.(float64) + // itemDescSoldNotes is the schema descriptor for sold_notes field. + itemDescSoldNotes := itemFields[12].Descriptor() + // item.SoldNotesValidator is a validator for the "sold_notes" field. It is called by the builders before save. + item.SoldNotesValidator = itemDescSoldNotes.Validators[0].(func(string) error) + // itemDescID is the schema descriptor for id field. + itemDescID := itemMixinFields0[0].Descriptor() + // item.DefaultID holds the default value on creation for the id field. + item.DefaultID = itemDescID.Default.(func() uuid.UUID) + itemfieldMixin := schema.ItemField{}.Mixin() + itemfieldMixinFields0 := itemfieldMixin[0].Fields() + _ = itemfieldMixinFields0 + itemfieldMixinFields1 := itemfieldMixin[1].Fields() + _ = itemfieldMixinFields1 + itemfieldFields := schema.ItemField{}.Fields() + _ = itemfieldFields + // itemfieldDescCreatedAt is the schema descriptor for created_at field. + itemfieldDescCreatedAt := itemfieldMixinFields0[1].Descriptor() + // itemfield.DefaultCreatedAt holds the default value on creation for the created_at field. + itemfield.DefaultCreatedAt = itemfieldDescCreatedAt.Default.(func() time.Time) + // itemfieldDescUpdatedAt is the schema descriptor for updated_at field. 
+ itemfieldDescUpdatedAt := itemfieldMixinFields0[2].Descriptor() + // itemfield.DefaultUpdatedAt holds the default value on creation for the updated_at field. + itemfield.DefaultUpdatedAt = itemfieldDescUpdatedAt.Default.(func() time.Time) + // itemfield.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + itemfield.UpdateDefaultUpdatedAt = itemfieldDescUpdatedAt.UpdateDefault.(func() time.Time) + // itemfieldDescName is the schema descriptor for name field. + itemfieldDescName := itemfieldMixinFields1[0].Descriptor() + // itemfield.NameValidator is a validator for the "name" field. It is called by the builders before save. + itemfield.NameValidator = func() func(string) error { + validators := itemfieldDescName.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(name string) error { + for _, fn := range fns { + if err := fn(name); err != nil { + return err + } + } + return nil + } + }() + // itemfieldDescDescription is the schema descriptor for description field. + itemfieldDescDescription := itemfieldMixinFields1[1].Descriptor() + // itemfield.DescriptionValidator is a validator for the "description" field. It is called by the builders before save. + itemfield.DescriptionValidator = itemfieldDescDescription.Validators[0].(func(string) error) + // itemfieldDescTextValue is the schema descriptor for text_value field. + itemfieldDescTextValue := itemfieldFields[1].Descriptor() + // itemfield.TextValueValidator is a validator for the "text_value" field. It is called by the builders before save. + itemfield.TextValueValidator = itemfieldDescTextValue.Validators[0].(func(string) error) + // itemfieldDescBooleanValue is the schema descriptor for boolean_value field. + itemfieldDescBooleanValue := itemfieldFields[3].Descriptor() + // itemfield.DefaultBooleanValue holds the default value on creation for the boolean_value field. 
+ itemfield.DefaultBooleanValue = itemfieldDescBooleanValue.Default.(bool) + // itemfieldDescTimeValue is the schema descriptor for time_value field. + itemfieldDescTimeValue := itemfieldFields[4].Descriptor() + // itemfield.DefaultTimeValue holds the default value on creation for the time_value field. + itemfield.DefaultTimeValue = itemfieldDescTimeValue.Default.(func() time.Time) + // itemfieldDescID is the schema descriptor for id field. + itemfieldDescID := itemfieldMixinFields0[0].Descriptor() + // itemfield.DefaultID holds the default value on creation for the id field. + itemfield.DefaultID = itemfieldDescID.Default.(func() uuid.UUID) + labelMixin := schema.Label{}.Mixin() + labelMixinFields0 := labelMixin[0].Fields() + _ = labelMixinFields0 + labelMixinFields1 := labelMixin[1].Fields() + _ = labelMixinFields1 + labelFields := schema.Label{}.Fields() + _ = labelFields + // labelDescCreatedAt is the schema descriptor for created_at field. + labelDescCreatedAt := labelMixinFields0[1].Descriptor() + // label.DefaultCreatedAt holds the default value on creation for the created_at field. + label.DefaultCreatedAt = labelDescCreatedAt.Default.(func() time.Time) + // labelDescUpdatedAt is the schema descriptor for updated_at field. + labelDescUpdatedAt := labelMixinFields0[2].Descriptor() + // label.DefaultUpdatedAt holds the default value on creation for the updated_at field. + label.DefaultUpdatedAt = labelDescUpdatedAt.Default.(func() time.Time) + // label.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + label.UpdateDefaultUpdatedAt = labelDescUpdatedAt.UpdateDefault.(func() time.Time) + // labelDescName is the schema descriptor for name field. + labelDescName := labelMixinFields1[0].Descriptor() + // label.NameValidator is a validator for the "name" field. It is called by the builders before save. 
+ label.NameValidator = func() func(string) error { + validators := labelDescName.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(name string) error { + for _, fn := range fns { + if err := fn(name); err != nil { + return err + } + } + return nil + } + }() + // labelDescDescription is the schema descriptor for description field. + labelDescDescription := labelMixinFields1[1].Descriptor() + // label.DescriptionValidator is a validator for the "description" field. It is called by the builders before save. + label.DescriptionValidator = labelDescDescription.Validators[0].(func(string) error) + // labelDescColor is the schema descriptor for color field. + labelDescColor := labelFields[0].Descriptor() + // label.ColorValidator is a validator for the "color" field. It is called by the builders before save. + label.ColorValidator = labelDescColor.Validators[0].(func(string) error) + // labelDescID is the schema descriptor for id field. + labelDescID := labelMixinFields0[0].Descriptor() + // label.DefaultID holds the default value on creation for the id field. + label.DefaultID = labelDescID.Default.(func() uuid.UUID) + locationMixin := schema.Location{}.Mixin() + locationMixinFields0 := locationMixin[0].Fields() + _ = locationMixinFields0 + locationMixinFields1 := locationMixin[1].Fields() + _ = locationMixinFields1 + locationFields := schema.Location{}.Fields() + _ = locationFields + // locationDescCreatedAt is the schema descriptor for created_at field. + locationDescCreatedAt := locationMixinFields0[1].Descriptor() + // location.DefaultCreatedAt holds the default value on creation for the created_at field. + location.DefaultCreatedAt = locationDescCreatedAt.Default.(func() time.Time) + // locationDescUpdatedAt is the schema descriptor for updated_at field. 
+ locationDescUpdatedAt := locationMixinFields0[2].Descriptor() + // location.DefaultUpdatedAt holds the default value on creation for the updated_at field. + location.DefaultUpdatedAt = locationDescUpdatedAt.Default.(func() time.Time) + // location.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + location.UpdateDefaultUpdatedAt = locationDescUpdatedAt.UpdateDefault.(func() time.Time) + // locationDescName is the schema descriptor for name field. + locationDescName := locationMixinFields1[0].Descriptor() + // location.NameValidator is a validator for the "name" field. It is called by the builders before save. + location.NameValidator = func() func(string) error { + validators := locationDescName.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(name string) error { + for _, fn := range fns { + if err := fn(name); err != nil { + return err + } + } + return nil + } + }() + // locationDescDescription is the schema descriptor for description field. + locationDescDescription := locationMixinFields1[1].Descriptor() + // location.DescriptionValidator is a validator for the "description" field. It is called by the builders before save. + location.DescriptionValidator = locationDescDescription.Validators[0].(func(string) error) + // locationDescID is the schema descriptor for id field. + locationDescID := locationMixinFields0[0].Descriptor() + // location.DefaultID holds the default value on creation for the id field. + location.DefaultID = locationDescID.Default.(func() uuid.UUID) + userMixin := schema.User{}.Mixin() + userMixinFields0 := userMixin[0].Fields() + _ = userMixinFields0 userFields := schema.User{}.Fields() _ = userFields + // userDescCreatedAt is the schema descriptor for created_at field. + userDescCreatedAt := userMixinFields0[1].Descriptor() + // user.DefaultCreatedAt holds the default value on creation for the created_at field. 
+ user.DefaultCreatedAt = userDescCreatedAt.Default.(func() time.Time) + // userDescUpdatedAt is the schema descriptor for updated_at field. + userDescUpdatedAt := userMixinFields0[2].Descriptor() + // user.DefaultUpdatedAt holds the default value on creation for the updated_at field. + user.DefaultUpdatedAt = userDescUpdatedAt.Default.(func() time.Time) + // user.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. + user.UpdateDefaultUpdatedAt = userDescUpdatedAt.UpdateDefault.(func() time.Time) // userDescName is the schema descriptor for name field. - userDescName := userFields[1].Descriptor() + userDescName := userFields[0].Descriptor() // user.NameValidator is a validator for the "name" field. It is called by the builders before save. - user.NameValidator = userDescName.Validators[0].(func(string) error) + user.NameValidator = func() func(string) error { + validators := userDescName.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(name string) error { + for _, fn := range fns { + if err := fn(name); err != nil { + return err + } + } + return nil + } + }() // userDescEmail is the schema descriptor for email field. - userDescEmail := userFields[2].Descriptor() + userDescEmail := userFields[1].Descriptor() // user.EmailValidator is a validator for the "email" field. It is called by the builders before save. - user.EmailValidator = userDescEmail.Validators[0].(func(string) error) + user.EmailValidator = func() func(string) error { + validators := userDescEmail.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(email string) error { + for _, fn := range fns { + if err := fn(email); err != nil { + return err + } + } + return nil + } + }() // userDescPassword is the schema descriptor for password field. 
- userDescPassword := userFields[3].Descriptor() + userDescPassword := userFields[2].Descriptor() // user.PasswordValidator is a validator for the "password" field. It is called by the builders before save. - user.PasswordValidator = userDescPassword.Validators[0].(func(string) error) + user.PasswordValidator = func() func(string) error { + validators := userDescPassword.Validators + fns := [...]func(string) error{ + validators[0].(func(string) error), + validators[1].(func(string) error), + } + return func(password string) error { + for _, fn := range fns { + if err := fn(password); err != nil { + return err + } + } + return nil + } + }() // userDescIsSuperuser is the schema descriptor for is_superuser field. - userDescIsSuperuser := userFields[4].Descriptor() + userDescIsSuperuser := userFields[3].Descriptor() // user.DefaultIsSuperuser holds the default value on creation for the is_superuser field. user.DefaultIsSuperuser = userDescIsSuperuser.Default.(bool) // userDescID is the schema descriptor for id field. - userDescID := userFields[0].Descriptor() + userDescID := userMixinFields0[0].Descriptor() // user.DefaultID holds the default value on creation for the id field. user.DefaultID = userDescID.Default.(func() uuid.UUID) } diff --git a/backend/ent/runtime/runtime.go b/backend/ent/runtime/runtime.go index 3fc430e..f07467e 100644 --- a/backend/ent/runtime/runtime.go +++ b/backend/ent/runtime/runtime.go @@ -1,10 +1,10 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package runtime // The schema-stitching logic is generated in github.com/hay-kot/content/backend/ent/runtime.go const ( - Version = "v0.10.0" // Version of ent codegen. - Sum = "h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo=" // Sum of ent codegen. + Version = "v0.11.2" // Version of ent codegen. + Sum = "h1:UM2/BUhF2FfsxPHRxLjQbhqJNaDdVlOwNIAMLs2jyto=" // Sum of ent codegen. 
) diff --git a/backend/ent/schema/authtokens.go b/backend/ent/schema/auth_tokens.go similarity index 84% rename from backend/ent/schema/authtokens.go rename to backend/ent/schema/auth_tokens.go index fbe9fd1..09297d0 100644 --- a/backend/ent/schema/authtokens.go +++ b/backend/ent/schema/auth_tokens.go @@ -7,6 +7,7 @@ import ( "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" "entgo.io/ent/schema/index" + "github.com/hay-kot/content/backend/ent/schema/mixins" ) // AuthTokens holds the schema definition for the AuthTokens entity. @@ -14,6 +15,12 @@ type AuthTokens struct { ent.Schema } +func (AuthTokens) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + } +} + // Fields of the AuthTokens. func (AuthTokens) Fields() []ent.Field { return []ent.Field{ @@ -21,8 +28,6 @@ func (AuthTokens) Fields() []ent.Field { Unique(), field.Time("expires_at"). Default(func() time.Time { return time.Now().Add(time.Hour * 24 * 7) }), - field.Time("created_at"). - Default(time.Now), } } diff --git a/backend/ent/schema/group.go b/backend/ent/schema/group.go new file mode 100644 index 0000000..2929a62 --- /dev/null +++ b/backend/ent/schema/group.go @@ -0,0 +1,41 @@ +package schema + +import ( + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/schema/mixins" +) + +// Group holds the schema definition for the Group entity. +type Group struct { + ent.Schema +} + +func (Group) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + } +} + +// Fields of the Home. +func (Group) Fields() []ent.Field { + return []ent.Field{ + field.String("name"). + MaxLen(255). + NotEmpty(), + field.Enum("currency"). + Default("usd"). + Values("usd"), // TODO: add more currencies + } +} + +// Edges of the Home. 
+func (Group) Edges() []ent.Edge { + return []ent.Edge{ + edge.To("users", User.Type), + edge.To("locations", Location.Type), + edge.To("items", Item.Type), + edge.To("labels", Label.Type), + } +} diff --git a/backend/ent/schema/item.go b/backend/ent/schema/item.go new file mode 100644 index 0000000..e2af747 --- /dev/null +++ b/backend/ent/schema/item.go @@ -0,0 +1,80 @@ +package schema + +import ( + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/schema/mixins" +) + +// Item holds the schema definition for the Item entity. +type Item struct { + ent.Schema +} + +func (Item) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + mixins.DetailsMixin{}, + } +} + +// Fields of the Item. +func (Item) Fields() []ent.Field { + return []ent.Field{ + field.String("notes"). + MaxLen(1000). + Optional(), + // ------------------------------------ + // item identification + field.String("serial_number"). + MaxLen(255). + Optional(), + field.String("model_number"). + MaxLen(255). + Optional(), + field.String("manufacturer"). + MaxLen(255). + Optional(), + // ------------------------------------ + // item purchase + field.Time("purchase_time"). + Optional(), + field.String("purchase_from"). + Optional(), + field.Float("purchase_price"). + Default(0), + field.UUID("purchase_receipt_id", uuid.UUID{}). + Optional(), + // ------------------------------------ + // Sold Details + field.Time("sold_time"). + Optional(), + field.String("sold_to"). + Optional(), + field.Float("sold_price"). + Default(0), + field.UUID("sold_receipt_id", uuid.UUID{}). + Optional(), + field.String("sold_notes"). + MaxLen(1000). + Optional(), + } +} + +// Edges of the Item. +func (Item) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("group", Group.Type). + Ref("items"). + Required(). + Unique(), + edge.From("location", Location.Type). + Ref("items"). 
+ Unique(), + edge.To("fields", ItemField.Type), + edge.From("label", Label.Type). + Ref("items"), + } +} diff --git a/backend/ent/schema/item_field.go b/backend/ent/schema/item_field.go new file mode 100644 index 0000000..e5d157c --- /dev/null +++ b/backend/ent/schema/item_field.go @@ -0,0 +1,48 @@ +package schema + +import ( + "time" + + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/schema/mixins" +) + +// ItemField holds the schema definition for the ItemField entity. +type ItemField struct { + ent.Schema +} + +func (ItemField) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + mixins.DetailsMixin{}, + } +} + +// Fields of the ItemField. +func (ItemField) Fields() []ent.Field { + return []ent.Field{ + field.Enum("type"). + Values("text", "number", "boolean", "time"), + field.String("text_value"). + MaxLen(500). + Optional(), + field.Int("number_value"). + Optional(), + field.Bool("boolean_value"). + Default(false), + field.Time("time_value"). + Default(time.Now), + } +} + +// Edges of the ItemField. +func (ItemField) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("item", Item.Type). + Ref("fields"). + Unique(), + } +} diff --git a/backend/ent/schema/label.go b/backend/ent/schema/label.go new file mode 100644 index 0000000..492ebb7 --- /dev/null +++ b/backend/ent/schema/label.go @@ -0,0 +1,40 @@ +package schema + +import ( + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "github.com/hay-kot/content/backend/ent/schema/mixins" +) + +// Label holds the schema definition for the Label entity. +type Label struct { + ent.Schema +} + +func (Label) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + mixins.DetailsMixin{}, + } +} + +// Fields of the Label. +func (Label) Fields() []ent.Field { + return []ent.Field{ + field.String("color"). + MaxLen(255). + Optional(), + } +} + +// Edges of the Label. 
+func (Label) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("group", Group.Type). + Ref("labels"). + Required(). + Unique(), + edge.To("items", Item.Type), + } +} diff --git a/backend/ent/schema/location.go b/backend/ent/schema/location.go new file mode 100644 index 0000000..0241008 --- /dev/null +++ b/backend/ent/schema/location.go @@ -0,0 +1,35 @@ +package schema + +import ( + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "github.com/hay-kot/content/backend/ent/schema/mixins" +) + +// Location holds the schema definition for the Location entity. +type Location struct { + ent.Schema +} + +func (Location) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + mixins.DetailsMixin{}, + } +} + +// Fields of the Location. +func (Location) Fields() []ent.Field { + return nil +} + +// Edges of the Location. +func (Location) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("group", Group.Type). + Ref("locations"). + Unique(). + Required(), + edge.To("items", Item.Type), + } +} diff --git a/backend/ent/schema/mixins/base.go b/backend/ent/schema/mixins/base.go new file mode 100644 index 0000000..ce5c8cf --- /dev/null +++ b/backend/ent/schema/mixins/base.go @@ -0,0 +1,42 @@ +package mixins + +import ( + "time" + + "entgo.io/ent" + "entgo.io/ent/schema/field" + "entgo.io/ent/schema/mixin" + "github.com/google/uuid" +) + +type BaseMixin struct { + mixin.Schema +} + +func (BaseMixin) Fields() []ent.Field { + return []ent.Field{ + field.UUID("id", uuid.UUID{}). + Default(uuid.New), + field.Time("created_at"). + Immutable(). + Default(time.Now), + field.Time("updated_at"). + Default(time.Now). + UpdateDefault(time.Now), + } +} + +type DetailsMixin struct { + mixin.Schema +} + +func (DetailsMixin) Fields() []ent.Field { + return []ent.Field{ + field.String("name"). + MaxLen(255). + NotEmpty(), + field.String("description"). + MaxLen(1000). 
+ Optional(), + } +} diff --git a/backend/ent/schema/user.go b/backend/ent/schema/user.go index d1fb726..32246a4 100644 --- a/backend/ent/schema/user.go +++ b/backend/ent/schema/user.go @@ -4,7 +4,7 @@ import ( "entgo.io/ent" "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" - "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/schema/mixins" ) // User holds the schema definition for the User entity. @@ -12,17 +12,24 @@ type User struct { ent.Schema } +func (User) Mixin() []ent.Mixin { + return []ent.Mixin{ + mixins.BaseMixin{}, + } +} + // Fields of the User. func (User) Fields() []ent.Field { return []ent.Field{ - field.UUID("id", uuid.UUID{}). - Default(uuid.New), field.String("name"). + MaxLen(255). NotEmpty(), field.String("email"). + MaxLen(255). NotEmpty(). Unique(), field.String("password"). + MaxLen(255). NotEmpty(). Sensitive(), field.Bool("is_superuser"). @@ -33,6 +40,10 @@ func (User) Fields() []ent.Field { // Edges of the User. func (User) Edges() []ent.Edge { return []ent.Edge{ + edge.From("group", Group.Type). + Ref("users"). + Required(). + Unique(), edge.To("auth_tokens", AuthTokens.Type), } } diff --git a/backend/ent/tx.go b/backend/ent/tx.go index b93d068..69d1007 100644 --- a/backend/ent/tx.go +++ b/backend/ent/tx.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -14,6 +14,16 @@ type Tx struct { config // AuthTokens is the client for interacting with the AuthTokens builders. AuthTokens *AuthTokensClient + // Group is the client for interacting with the Group builders. + Group *GroupClient + // Item is the client for interacting with the Item builders. + Item *ItemClient + // ItemField is the client for interacting with the ItemField builders. + ItemField *ItemFieldClient + // Label is the client for interacting with the Label builders. + Label *LabelClient + // Location is the client for interacting with the Location builders. 
+ Location *LocationClient // User is the client for interacting with the User builders. User *UserClient @@ -152,6 +162,11 @@ func (tx *Tx) Client() *Client { func (tx *Tx) init() { tx.AuthTokens = NewAuthTokensClient(tx.config) + tx.Group = NewGroupClient(tx.config) + tx.Item = NewItemClient(tx.config) + tx.ItemField = NewItemFieldClient(tx.config) + tx.Label = NewLabelClient(tx.config) + tx.Location = NewLocationClient(tx.config) tx.User = NewUserClient(tx.config) } diff --git a/backend/ent/user.go b/backend/ent/user.go index bb6f0b3..b3387f4 100644 --- a/backend/ent/user.go +++ b/backend/ent/user.go @@ -1,13 +1,15 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent import ( "fmt" "strings" + "time" "entgo.io/ent/dialect/sql" "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent/group" "github.com/hay-kot/content/backend/ent/user" ) @@ -16,6 +18,10 @@ type User struct { config `json:"-"` // ID of the ent. ID uuid.UUID `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` // Name holds the value of the "name" field. Name string `json:"name,omitempty"` // Email holds the value of the "email" field. @@ -26,22 +32,38 @@ type User struct { IsSuperuser bool `json:"is_superuser,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the UserQuery when eager-loading is set. - Edges UserEdges `json:"edges"` + Edges UserEdges `json:"edges"` + group_users *uuid.UUID } // UserEdges holds the relations/edges for other nodes in the graph. type UserEdges struct { + // Group holds the value of the group edge. + Group *Group `json:"group,omitempty"` // AuthTokens holds the value of the auth_tokens edge. 
AuthTokens []*AuthTokens `json:"auth_tokens,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. - loadedTypes [1]bool + loadedTypes [2]bool +} + +// GroupOrErr returns the Group value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e UserEdges) GroupOrErr() (*Group, error) { + if e.loadedTypes[0] { + if e.Group == nil { + // Edge was loaded but was not found. + return nil, &NotFoundError{label: group.Label} + } + return e.Group, nil + } + return nil, &NotLoadedError{edge: "group"} } // AuthTokensOrErr returns the AuthTokens value or an error if the edge // was not loaded in eager-loading. func (e UserEdges) AuthTokensOrErr() ([]*AuthTokens, error) { - if e.loadedTypes[0] { + if e.loadedTypes[1] { return e.AuthTokens, nil } return nil, &NotLoadedError{edge: "auth_tokens"} @@ -56,8 +78,12 @@ func (*User) scanValues(columns []string) ([]interface{}, error) { values[i] = new(sql.NullBool) case user.FieldName, user.FieldEmail, user.FieldPassword: values[i] = new(sql.NullString) + case user.FieldCreatedAt, user.FieldUpdatedAt: + values[i] = new(sql.NullTime) case user.FieldID: values[i] = new(uuid.UUID) + case user.ForeignKeys[0]: // group_users + values[i] = &sql.NullScanner{S: new(uuid.UUID)} default: return nil, fmt.Errorf("unexpected column %q for type User", columns[i]) } @@ -79,6 +105,18 @@ func (u *User) assignValues(columns []string, values []interface{}) error { } else if value != nil { u.ID = *value } + case user.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + u.CreatedAt = value.Time + } + case user.FieldUpdatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[i]) + } else if value.Valid { + u.UpdatedAt = value.Time + } case user.FieldName: if 
value, ok := values[i].(*sql.NullString); !ok { return fmt.Errorf("unexpected type %T for field name", values[i]) @@ -103,11 +141,23 @@ func (u *User) assignValues(columns []string, values []interface{}) error { } else if value.Valid { u.IsSuperuser = value.Bool } + case user.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field group_users", values[i]) + } else if value.Valid { + u.group_users = new(uuid.UUID) + *u.group_users = *value.S.(*uuid.UUID) + } } } return nil } +// QueryGroup queries the "group" edge of the User entity. +func (u *User) QueryGroup() *GroupQuery { + return (&UserClient{config: u.config}).QueryGroup(u) +} + // QueryAuthTokens queries the "auth_tokens" edge of the User entity. func (u *User) QueryAuthTokens() *AuthTokensQuery { return (&UserClient{config: u.config}).QueryAuthTokens(u) @@ -123,11 +173,11 @@ func (u *User) Update() *UserUpdateOne { // Unwrap unwraps the User entity that was returned from a transaction after it was closed, // so that all future queries will be executed through the driver which created the transaction. 
func (u *User) Unwrap() *User { - tx, ok := u.config.driver.(*txDriver) + _tx, ok := u.config.driver.(*txDriver) if !ok { panic("ent: User is not a transactional entity") } - u.config.driver = tx.drv + u.config.driver = _tx.drv return u } @@ -135,13 +185,22 @@ func (u *User) Unwrap() *User { func (u *User) String() string { var builder strings.Builder builder.WriteString("User(") - builder.WriteString(fmt.Sprintf("id=%v", u.ID)) - builder.WriteString(", name=") + builder.WriteString(fmt.Sprintf("id=%v, ", u.ID)) + builder.WriteString("created_at=") + builder.WriteString(u.CreatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("updated_at=") + builder.WriteString(u.UpdatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("name=") builder.WriteString(u.Name) - builder.WriteString(", email=") + builder.WriteString(", ") + builder.WriteString("email=") builder.WriteString(u.Email) - builder.WriteString(", password=") - builder.WriteString(", is_superuser=") + builder.WriteString(", ") + builder.WriteString("password=") + builder.WriteString(", ") + builder.WriteString("is_superuser=") builder.WriteString(fmt.Sprintf("%v", u.IsSuperuser)) builder.WriteByte(')') return builder.String() diff --git a/backend/ent/user/user.go b/backend/ent/user/user.go index 9bbbd9a..fccdcba 100644 --- a/backend/ent/user/user.go +++ b/backend/ent/user/user.go @@ -1,8 +1,10 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package user import ( + "time" + "github.com/google/uuid" ) @@ -11,6 +13,10 @@ const ( Label = "user" // FieldID holds the string denoting the id field in the database. FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // FieldUpdatedAt holds the string denoting the updated_at field in the database. + FieldUpdatedAt = "updated_at" // FieldName holds the string denoting the name field in the database. 
FieldName = "name" // FieldEmail holds the string denoting the email field in the database. @@ -19,10 +25,19 @@ const ( FieldPassword = "password" // FieldIsSuperuser holds the string denoting the is_superuser field in the database. FieldIsSuperuser = "is_superuser" + // EdgeGroup holds the string denoting the group edge name in mutations. + EdgeGroup = "group" // EdgeAuthTokens holds the string denoting the auth_tokens edge name in mutations. EdgeAuthTokens = "auth_tokens" // Table holds the table name of the user in the database. Table = "users" + // GroupTable is the table that holds the group relation/edge. + GroupTable = "users" + // GroupInverseTable is the table name for the Group entity. + // It exists in this package in order to avoid circular dependency with the "group" package. + GroupInverseTable = "groups" + // GroupColumn is the table column denoting the group relation/edge. + GroupColumn = "group_users" // AuthTokensTable is the table that holds the auth_tokens relation/edge. AuthTokensTable = "auth_tokens" // AuthTokensInverseTable is the table name for the AuthTokens entity. @@ -35,12 +50,20 @@ const ( // Columns holds all SQL columns for user fields. var Columns = []string{ FieldID, + FieldCreatedAt, + FieldUpdatedAt, FieldName, FieldEmail, FieldPassword, FieldIsSuperuser, } +// ForeignKeys holds the SQL foreign-keys that are owned by the "users" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "group_users", +} + // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -48,10 +71,21 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } var ( + // DefaultCreatedAt holds the default value on creation for the "created_at" field. 
+ DefaultCreatedAt func() time.Time + // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. + DefaultUpdatedAt func() time.Time + // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. + UpdateDefaultUpdatedAt func() time.Time // NameValidator is a validator for the "name" field. It is called by the builders before save. NameValidator func(string) error // EmailValidator is a validator for the "email" field. It is called by the builders before save. diff --git a/backend/ent/user/where.go b/backend/ent/user/where.go index e17ff3a..b7b468c 100644 --- a/backend/ent/user/where.go +++ b/backend/ent/user/where.go @@ -1,8 +1,10 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package user import ( + "time" + "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "github.com/google/uuid" @@ -33,12 +35,6 @@ func IDNEQ(id uuid.UUID) predicate.User { // IDIn applies the In predicate on the ID field. func IDIn(ids ...uuid.UUID) predicate.User { return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(ids) == 0 { - s.Where(sql.False()) - return - } v := make([]interface{}, len(ids)) for i := range v { v[i] = ids[i] @@ -50,12 +46,6 @@ func IDIn(ids ...uuid.UUID) predicate.User { // IDNotIn applies the NotIn predicate on the ID field. func IDNotIn(ids ...uuid.UUID) predicate.User { return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(ids) == 0 { - s.Where(sql.False()) - return - } v := make([]interface{}, len(ids)) for i := range v { v[i] = ids[i] @@ -92,6 +82,20 @@ func IDLTE(id uuid.UUID) predicate.User { }) } +// CreatedAt applies equality check predicate on the "created_at" field. 
It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + // Name applies equality check predicate on the "name" field. It's identical to NameEQ. func Name(v string) predicate.User { return predicate.User(func(s *sql.Selector) { @@ -120,6 +124,134 @@ func IsSuperuser(v bool) predicate.User { }) } +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. 
+func CreatedAtGTE(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. +func UpdatedAtEQ(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. +func UpdatedAtIn(vs ...time.Time) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. +func UpdatedAtNotIn(vs ...time.Time) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...)) + }) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. +func UpdatedAtGT(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. 
+func UpdatedAtGTE(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. +func UpdatedAtLT(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldUpdatedAt), v)) + }) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldUpdatedAt), v)) + }) +} + // NameEQ applies the EQ predicate on the "name" field. func NameEQ(v string) predicate.User { return predicate.User(func(s *sql.Selector) { @@ -141,12 +273,6 @@ func NameIn(vs ...string) predicate.User { v[i] = vs[i] } return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.In(s.C(FieldName), v...)) }) } @@ -158,12 +284,6 @@ func NameNotIn(vs ...string) predicate.User { v[i] = vs[i] } return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.NotIn(s.C(FieldName), v...)) }) } @@ -252,12 +372,6 @@ func EmailIn(vs ...string) predicate.User { v[i] = vs[i] } return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. 
- if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.In(s.C(FieldEmail), v...)) }) } @@ -269,12 +383,6 @@ func EmailNotIn(vs ...string) predicate.User { v[i] = vs[i] } return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.NotIn(s.C(FieldEmail), v...)) }) } @@ -363,12 +471,6 @@ func PasswordIn(vs ...string) predicate.User { v[i] = vs[i] } return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.In(s.C(FieldPassword), v...)) }) } @@ -380,12 +482,6 @@ func PasswordNotIn(vs ...string) predicate.User { v[i] = vs[i] } return predicate.User(func(s *sql.Selector) { - // if not arguments were provided, append the FALSE constants, - // since we can't apply "IN ()". This will make this predicate falsy. - if len(v) == 0 { - s.Where(sql.False()) - return - } s.Where(sql.NotIn(s.C(FieldPassword), v...)) }) } @@ -467,6 +563,34 @@ func IsSuperuserNEQ(v bool) predicate.User { }) } +// HasGroup applies the HasEdge predicate on the "group" edge. +func HasGroup() predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasGroupWith applies the HasEdge predicate on the "group" edge with a given conditions (other predicates). 
+func HasGroupWith(preds ...predicate.Group) predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(GroupInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + // HasAuthTokens applies the HasEdge predicate on the "auth_tokens" edge. func HasAuthTokens() predicate.User { return predicate.User(func(s *sql.Selector) { diff --git a/backend/ent/user_create.go b/backend/ent/user_create.go index 696290d..c0f4c25 100644 --- a/backend/ent/user_create.go +++ b/backend/ent/user_create.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -6,11 +6,13 @@ import ( "context" "errors" "fmt" + "time" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" "github.com/google/uuid" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" "github.com/hay-kot/content/backend/ent/user" ) @@ -21,6 +23,34 @@ type UserCreate struct { hooks []Hook } +// SetCreatedAt sets the "created_at" field. +func (uc *UserCreate) SetCreatedAt(t time.Time) *UserCreate { + uc.mutation.SetCreatedAt(t) + return uc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (uc *UserCreate) SetNillableCreatedAt(t *time.Time) *UserCreate { + if t != nil { + uc.SetCreatedAt(*t) + } + return uc +} + +// SetUpdatedAt sets the "updated_at" field. +func (uc *UserCreate) SetUpdatedAt(t time.Time) *UserCreate { + uc.mutation.SetUpdatedAt(t) + return uc +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (uc *UserCreate) SetNillableUpdatedAt(t *time.Time) *UserCreate { + if t != nil { + uc.SetUpdatedAt(*t) + } + return uc +} + // SetName sets the "name" field. 
func (uc *UserCreate) SetName(s string) *UserCreate { uc.mutation.SetName(s) @@ -67,15 +97,26 @@ func (uc *UserCreate) SetNillableID(u *uuid.UUID) *UserCreate { return uc } +// SetGroupID sets the "group" edge to the Group entity by ID. +func (uc *UserCreate) SetGroupID(id uuid.UUID) *UserCreate { + uc.mutation.SetGroupID(id) + return uc +} + +// SetGroup sets the "group" edge to the Group entity. +func (uc *UserCreate) SetGroup(g *Group) *UserCreate { + return uc.SetGroupID(g.ID) +} + // AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. -func (uc *UserCreate) AddAuthTokenIDs(ids ...int) *UserCreate { +func (uc *UserCreate) AddAuthTokenIDs(ids ...uuid.UUID) *UserCreate { uc.mutation.AddAuthTokenIDs(ids...) return uc } // AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. func (uc *UserCreate) AddAuthTokens(a ...*AuthTokens) *UserCreate { - ids := make([]int, len(a)) + ids := make([]uuid.UUID, len(a)) for i := range a { ids[i] = a[i].ID } @@ -122,9 +163,15 @@ func (uc *UserCreate) Save(ctx context.Context) (*User, error) { } mut = uc.hooks[i](mut) } - if _, err := mut.Mutate(ctx, uc.mutation); err != nil { + v, err := mut.Mutate(ctx, uc.mutation) + if err != nil { return nil, err } + nv, ok := v.(*User) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from UserMutation", v) + } + node = nv } return node, err } @@ -153,6 +200,14 @@ func (uc *UserCreate) ExecX(ctx context.Context) { // defaults sets the default values of the builder before save. 
func (uc *UserCreate) defaults() { + if _, ok := uc.mutation.CreatedAt(); !ok { + v := user.DefaultCreatedAt() + uc.mutation.SetCreatedAt(v) + } + if _, ok := uc.mutation.UpdatedAt(); !ok { + v := user.DefaultUpdatedAt() + uc.mutation.SetUpdatedAt(v) + } if _, ok := uc.mutation.IsSuperuser(); !ok { v := user.DefaultIsSuperuser uc.mutation.SetIsSuperuser(v) @@ -165,6 +220,12 @@ func (uc *UserCreate) defaults() { // check runs all checks and user-defined validators on the builder. func (uc *UserCreate) check() error { + if _, ok := uc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "User.created_at"`)} + } + if _, ok := uc.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "User.updated_at"`)} + } if _, ok := uc.mutation.Name(); !ok { return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "User.name"`)} } @@ -192,6 +253,9 @@ func (uc *UserCreate) check() error { if _, ok := uc.mutation.IsSuperuser(); !ok { return &ValidationError{Name: "is_superuser", err: errors.New(`ent: missing required field "User.is_superuser"`)} } + if _, ok := uc.mutation.GroupID(); !ok { + return &ValidationError{Name: "group", err: errors.New(`ent: missing required edge "User.group"`)} + } return nil } @@ -199,7 +263,7 @@ func (uc *UserCreate) sqlSave(ctx context.Context) (*User, error) { _node, _spec := uc.createSpec() if err := sqlgraph.CreateNode(ctx, uc.driver, _spec); err != nil { if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } return nil, err } @@ -228,6 +292,22 @@ func (uc *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) { _node.ID = id _spec.ID.Value = &id } + if value, ok := uc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: 
user.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if value, ok := uc.mutation.UpdatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: user.FieldUpdatedAt, + }) + _node.UpdatedAt = value + } if value, ok := uc.mutation.Name(); ok { _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ Type: field.TypeString, @@ -260,6 +340,26 @@ func (uc *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) { }) _node.IsSuperuser = value } + if nodes := uc.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: user.GroupTable, + Columns: []string{user.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.group_users = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } if nodes := uc.mutation.AuthTokensIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ Rel: sqlgraph.O2M, @@ -269,7 +369,7 @@ func (uc *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) { Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -315,7 +415,7 @@ func (ucb *UserCreateBulk) Save(ctx context.Context) ([]*User, error) { // Invoke the actual operation on the latest mutation in the chain. if err = sqlgraph.BatchCreate(ctx, ucb.driver, spec); err != nil { if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } } } diff --git a/backend/ent/user_delete.go b/backend/ent/user_delete.go index 96d5d52..cb23cc3 100644 --- a/backend/ent/user_delete.go +++ b/backend/ent/user_delete.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. 
package ent @@ -84,7 +84,11 @@ func (ud *UserDelete) sqlExec(ctx context.Context) (int, error) { } } } - return sqlgraph.DeleteNodes(ctx, ud.driver, _spec) + affected, err := sqlgraph.DeleteNodes(ctx, ud.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return affected, err } // UserDeleteOne is the builder for deleting a single User entity. diff --git a/backend/ent/user_query.go b/backend/ent/user_query.go index a40d95d..8dc2461 100644 --- a/backend/ent/user_query.go +++ b/backend/ent/user_query.go @@ -1,11 +1,10 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent import ( "context" "database/sql/driver" - "errors" "fmt" "math" @@ -14,6 +13,7 @@ import ( "entgo.io/ent/schema/field" "github.com/google/uuid" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" "github.com/hay-kot/content/backend/ent/predicate" "github.com/hay-kot/content/backend/ent/user" ) @@ -21,14 +21,15 @@ import ( // UserQuery is the builder for querying User entities. type UserQuery struct { config - limit *int - offset *int - unique *bool - order []OrderFunc - fields []string - predicates []predicate.User - // eager-loading edges. + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.User + withGroup *GroupQuery withAuthTokens *AuthTokensQuery + withFKs bool // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -65,6 +66,28 @@ func (uq *UserQuery) Order(o ...OrderFunc) *UserQuery { return uq } +// QueryGroup chains the current query on the "group" edge. 
+func (uq *UserQuery) QueryGroup() *GroupQuery { + query := &GroupQuery{config: uq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := uq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, selector), + sqlgraph.To(group.Table, group.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, user.GroupTable, user.GroupColumn), + ) + fromU = sqlgraph.SetNeighbors(uq.driver.Dialect(), step) + return fromU, nil + } + return query +} + // QueryAuthTokens chains the current query on the "auth_tokens" edge. func (uq *UserQuery) QueryAuthTokens() *AuthTokensQuery { query := &AuthTokensQuery{config: uq.config} @@ -133,7 +156,7 @@ func (uq *UserQuery) FirstIDX(ctx context.Context) uuid.UUID { } // Only returns a single User entity found by the query, ensuring it only returns one. -// Returns a *NotSingularError when exactly one User entity is not found. +// Returns a *NotSingularError when more than one User entity is found. // Returns a *NotFoundError when no User entities are found. func (uq *UserQuery) Only(ctx context.Context) (*User, error) { nodes, err := uq.Limit(2).All(ctx) @@ -160,7 +183,7 @@ func (uq *UserQuery) OnlyX(ctx context.Context) *User { } // OnlyID is like Only, but returns the only User ID in the query. -// Returns a *NotSingularError when exactly one User ID is not found. +// Returns a *NotSingularError when more than one User ID is found. // Returns a *NotFoundError when no entities are found. 
func (uq *UserQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { var ids []uuid.UUID @@ -268,13 +291,26 @@ func (uq *UserQuery) Clone() *UserQuery { offset: uq.offset, order: append([]OrderFunc{}, uq.order...), predicates: append([]predicate.User{}, uq.predicates...), + withGroup: uq.withGroup.Clone(), withAuthTokens: uq.withAuthTokens.Clone(), // clone intermediate query. - sql: uq.sql.Clone(), - path: uq.path, + sql: uq.sql.Clone(), + path: uq.path, + unique: uq.unique, } } +// WithGroup tells the query-builder to eager-load the nodes that are connected to +// the "group" edge. The optional arguments are used to configure the query builder of the edge. +func (uq *UserQuery) WithGroup(opts ...func(*GroupQuery)) *UserQuery { + query := &GroupQuery{config: uq.config} + for _, opt := range opts { + opt(query) + } + uq.withGroup = query + return uq +} + // WithAuthTokens tells the query-builder to eager-load the nodes that are connected to // the "auth_tokens" edge. The optional arguments are used to configure the query builder of the edge. func (uq *UserQuery) WithAuthTokens(opts ...func(*AuthTokensQuery)) *UserQuery { @@ -292,25 +328,26 @@ func (uq *UserQuery) WithAuthTokens(opts ...func(*AuthTokensQuery)) *UserQuery { // Example: // // var v []struct { -// Name string `json:"name,omitempty"` +// CreatedAt time.Time `json:"created_at,omitempty"` // Count int `json:"count,omitempty"` // } // // client.User.Query(). -// GroupBy(user.FieldName). +// GroupBy(user.FieldCreatedAt). // Aggregate(ent.Count()). // Scan(ctx, &v) -// func (uq *UserQuery) GroupBy(field string, fields ...string) *UserGroupBy { - group := &UserGroupBy{config: uq.config} - group.fields = append([]string{field}, fields...) - group.path = func(ctx context.Context) (prev *sql.Selector, err error) { + grbuild := &UserGroupBy{config: uq.config} + grbuild.fields = append([]string{field}, fields...) 
+ grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) { if err := uq.prepareQuery(ctx); err != nil { return nil, err } return uq.sqlQuery(ctx), nil } - return group + grbuild.label = user.Label + grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan + return grbuild } // Select allows the selection one or more fields/columns for the given query, @@ -319,16 +356,18 @@ func (uq *UserQuery) GroupBy(field string, fields ...string) *UserGroupBy { // Example: // // var v []struct { -// Name string `json:"name,omitempty"` +// CreatedAt time.Time `json:"created_at,omitempty"` // } // // client.User.Query(). -// Select(user.FieldName). +// Select(user.FieldCreatedAt). // Scan(ctx, &v) -// func (uq *UserQuery) Select(fields ...string) *UserSelect { uq.fields = append(uq.fields, fields...) - return &UserSelect{UserQuery: uq} + selbuild := &UserSelect{UserQuery: uq} + selbuild.label = user.Label + selbuild.flds, selbuild.scan = &uq.fields, selbuild.Scan + return selbuild } func (uq *UserQuery) prepareQuery(ctx context.Context) error { @@ -347,66 +386,117 @@ func (uq *UserQuery) prepareQuery(ctx context.Context) error { return nil } -func (uq *UserQuery) sqlAll(ctx context.Context) ([]*User, error) { +func (uq *UserQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*User, error) { var ( nodes = []*User{} + withFKs = uq.withFKs _spec = uq.querySpec() - loadedTypes = [1]bool{ + loadedTypes = [2]bool{ + uq.withGroup != nil, uq.withAuthTokens != nil, } ) + if uq.withGroup != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, user.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]interface{}, error) { - node := &User{config: uq.config} - nodes = append(nodes, node) - return node.scanValues(columns) + return (*User).scanValues(nil, columns) } _spec.Assign = func(columns []string, values []interface{}) error { - if len(nodes) == 0 { - return fmt.Errorf("ent: Assign called without calling ScanValues") - } - node := nodes[len(nodes)-1] + node := &User{config: uq.config} + nodes = append(nodes, node) node.Edges.loadedTypes = loadedTypes return node.assignValues(columns, values) } + for i := range hooks { + hooks[i](ctx, _spec) + } if err := sqlgraph.QueryNodes(ctx, uq.driver, _spec); err != nil { return nil, err } if len(nodes) == 0 { return nodes, nil } - - if query := uq.withAuthTokens; query != nil { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[uuid.UUID]*User) - for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - nodes[i].Edges.AuthTokens = []*AuthTokens{} - } - query.withFKs = true - query.Where(predicate.AuthTokens(func(s *sql.Selector) { - s.Where(sql.InValues(user.AuthTokensColumn, fks...)) - })) - neighbors, err := query.All(ctx) - if err != nil { + if query := uq.withGroup; query != nil { + if err := uq.loadGroup(ctx, query, nodes, nil, + func(n *User, e *Group) { n.Edges.Group = e }); err != nil { return nil, err } - for _, n := range neighbors { - fk := n.user_auth_tokens - if fk == nil { - return nil, fmt.Errorf(`foreign-key "user_auth_tokens" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] - if !ok { - return nil, fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v for node %v`, *fk, n.ID) - } - node.Edges.AuthTokens = append(node.Edges.AuthTokens, n) + } + if query := uq.withAuthTokens; query != nil { + if err := uq.loadAuthTokens(ctx, query, nodes, + func(n *User) { n.Edges.AuthTokens = []*AuthTokens{} }, + func(n *User, e *AuthTokens) { n.Edges.AuthTokens = append(n.Edges.AuthTokens, e) }); err 
!= nil { + return nil, err } } - return nodes, nil } +func (uq *UserQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []*User, init func(*User), assign func(*User, *Group)) error { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*User) + for i := range nodes { + if nodes[i].group_users == nil { + continue + } + fk := *nodes[i].group_users + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(group.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "group_users" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} +func (uq *UserQuery) loadAuthTokens(ctx context.Context, query *AuthTokensQuery, nodes []*User, init func(*User), assign func(*User, *AuthTokens)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*User) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.InValues(user.AuthTokensColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.user_auth_tokens + if fk == nil { + return fmt.Errorf(`foreign-key "user_auth_tokens" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} + func (uq *UserQuery) sqlCount(ctx context.Context) (int, error) { _spec := uq.querySpec() _spec.Node.Columns = uq.fields @@ -507,6 +597,7 @@ func (uq *UserQuery) sqlQuery(ctx context.Context) *sql.Selector { // UserGroupBy is the group-by builder 
for User entities. type UserGroupBy struct { config + selector fields []string fns []AggregateFunc // intermediate query (i.e. traversal path). @@ -530,209 +621,6 @@ func (ugb *UserGroupBy) Scan(ctx context.Context, v interface{}) error { return ugb.sqlScan(ctx, v) } -// ScanX is like Scan, but panics if an error occurs. -func (ugb *UserGroupBy) ScanX(ctx context.Context, v interface{}) { - if err := ugb.Scan(ctx, v); err != nil { - panic(err) - } -} - -// Strings returns list of strings from group-by. -// It is only allowed when executing a group-by query with one field. -func (ugb *UserGroupBy) Strings(ctx context.Context) ([]string, error) { - if len(ugb.fields) > 1 { - return nil, errors.New("ent: UserGroupBy.Strings is not achievable when grouping more than 1 field") - } - var v []string - if err := ugb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// StringsX is like Strings, but panics if an error occurs. -func (ugb *UserGroupBy) StringsX(ctx context.Context) []string { - v, err := ugb.Strings(ctx) - if err != nil { - panic(err) - } - return v -} - -// String returns a single string from a group-by query. -// It is only allowed when executing a group-by query with one field. -func (ugb *UserGroupBy) String(ctx context.Context) (_ string, err error) { - var v []string - if v, err = ugb.Strings(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserGroupBy.Strings returned %d results when one was expected", len(v)) - } - return -} - -// StringX is like String, but panics if an error occurs. -func (ugb *UserGroupBy) StringX(ctx context.Context) string { - v, err := ugb.String(ctx) - if err != nil { - panic(err) - } - return v -} - -// Ints returns list of ints from group-by. -// It is only allowed when executing a group-by query with one field. 
-func (ugb *UserGroupBy) Ints(ctx context.Context) ([]int, error) { - if len(ugb.fields) > 1 { - return nil, errors.New("ent: UserGroupBy.Ints is not achievable when grouping more than 1 field") - } - var v []int - if err := ugb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// IntsX is like Ints, but panics if an error occurs. -func (ugb *UserGroupBy) IntsX(ctx context.Context) []int { - v, err := ugb.Ints(ctx) - if err != nil { - panic(err) - } - return v -} - -// Int returns a single int from a group-by query. -// It is only allowed when executing a group-by query with one field. -func (ugb *UserGroupBy) Int(ctx context.Context) (_ int, err error) { - var v []int - if v, err = ugb.Ints(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserGroupBy.Ints returned %d results when one was expected", len(v)) - } - return -} - -// IntX is like Int, but panics if an error occurs. -func (ugb *UserGroupBy) IntX(ctx context.Context) int { - v, err := ugb.Int(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64s returns list of float64s from group-by. -// It is only allowed when executing a group-by query with one field. -func (ugb *UserGroupBy) Float64s(ctx context.Context) ([]float64, error) { - if len(ugb.fields) > 1 { - return nil, errors.New("ent: UserGroupBy.Float64s is not achievable when grouping more than 1 field") - } - var v []float64 - if err := ugb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// Float64sX is like Float64s, but panics if an error occurs. -func (ugb *UserGroupBy) Float64sX(ctx context.Context) []float64 { - v, err := ugb.Float64s(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64 returns a single float64 from a group-by query. -// It is only allowed when executing a group-by query with one field. 
-func (ugb *UserGroupBy) Float64(ctx context.Context) (_ float64, err error) { - var v []float64 - if v, err = ugb.Float64s(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserGroupBy.Float64s returned %d results when one was expected", len(v)) - } - return -} - -// Float64X is like Float64, but panics if an error occurs. -func (ugb *UserGroupBy) Float64X(ctx context.Context) float64 { - v, err := ugb.Float64(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bools returns list of bools from group-by. -// It is only allowed when executing a group-by query with one field. -func (ugb *UserGroupBy) Bools(ctx context.Context) ([]bool, error) { - if len(ugb.fields) > 1 { - return nil, errors.New("ent: UserGroupBy.Bools is not achievable when grouping more than 1 field") - } - var v []bool - if err := ugb.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// BoolsX is like Bools, but panics if an error occurs. -func (ugb *UserGroupBy) BoolsX(ctx context.Context) []bool { - v, err := ugb.Bools(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bool returns a single bool from a group-by query. -// It is only allowed when executing a group-by query with one field. -func (ugb *UserGroupBy) Bool(ctx context.Context) (_ bool, err error) { - var v []bool - if v, err = ugb.Bools(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserGroupBy.Bools returned %d results when one was expected", len(v)) - } - return -} - -// BoolX is like Bool, but panics if an error occurs. 
-func (ugb *UserGroupBy) BoolX(ctx context.Context) bool { - v, err := ugb.Bool(ctx) - if err != nil { - panic(err) - } - return v -} - func (ugb *UserGroupBy) sqlScan(ctx context.Context, v interface{}) error { for _, f := range ugb.fields { if !user.ValidColumn(f) { @@ -774,6 +662,7 @@ func (ugb *UserGroupBy) sqlQuery() *sql.Selector { // UserSelect is the builder for selecting fields of User entities. type UserSelect struct { *UserQuery + selector // intermediate query (i.e. traversal path). sql *sql.Selector } @@ -787,201 +676,6 @@ func (us *UserSelect) Scan(ctx context.Context, v interface{}) error { return us.sqlScan(ctx, v) } -// ScanX is like Scan, but panics if an error occurs. -func (us *UserSelect) ScanX(ctx context.Context, v interface{}) { - if err := us.Scan(ctx, v); err != nil { - panic(err) - } -} - -// Strings returns list of strings from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Strings(ctx context.Context) ([]string, error) { - if len(us.fields) > 1 { - return nil, errors.New("ent: UserSelect.Strings is not achievable when selecting more than 1 field") - } - var v []string - if err := us.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// StringsX is like Strings, but panics if an error occurs. -func (us *UserSelect) StringsX(ctx context.Context) []string { - v, err := us.Strings(ctx) - if err != nil { - panic(err) - } - return v -} - -// String returns a single string from a selector. It is only allowed when selecting one field. -func (us *UserSelect) String(ctx context.Context) (_ string, err error) { - var v []string - if v, err = us.Strings(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserSelect.Strings returned %d results when one was expected", len(v)) - } - return -} - -// StringX is like String, but panics if an error occurs. 
-func (us *UserSelect) StringX(ctx context.Context) string { - v, err := us.String(ctx) - if err != nil { - panic(err) - } - return v -} - -// Ints returns list of ints from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Ints(ctx context.Context) ([]int, error) { - if len(us.fields) > 1 { - return nil, errors.New("ent: UserSelect.Ints is not achievable when selecting more than 1 field") - } - var v []int - if err := us.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// IntsX is like Ints, but panics if an error occurs. -func (us *UserSelect) IntsX(ctx context.Context) []int { - v, err := us.Ints(ctx) - if err != nil { - panic(err) - } - return v -} - -// Int returns a single int from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Int(ctx context.Context) (_ int, err error) { - var v []int - if v, err = us.Ints(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserSelect.Ints returned %d results when one was expected", len(v)) - } - return -} - -// IntX is like Int, but panics if an error occurs. -func (us *UserSelect) IntX(ctx context.Context) int { - v, err := us.Int(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Float64s(ctx context.Context) ([]float64, error) { - if len(us.fields) > 1 { - return nil, errors.New("ent: UserSelect.Float64s is not achievable when selecting more than 1 field") - } - var v []float64 - if err := us.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// Float64sX is like Float64s, but panics if an error occurs. 
-func (us *UserSelect) Float64sX(ctx context.Context) []float64 { - v, err := us.Float64s(ctx) - if err != nil { - panic(err) - } - return v -} - -// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Float64(ctx context.Context) (_ float64, err error) { - var v []float64 - if v, err = us.Float64s(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserSelect.Float64s returned %d results when one was expected", len(v)) - } - return -} - -// Float64X is like Float64, but panics if an error occurs. -func (us *UserSelect) Float64X(ctx context.Context) float64 { - v, err := us.Float64(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bools returns list of bools from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Bools(ctx context.Context) ([]bool, error) { - if len(us.fields) > 1 { - return nil, errors.New("ent: UserSelect.Bools is not achievable when selecting more than 1 field") - } - var v []bool - if err := us.Scan(ctx, &v); err != nil { - return nil, err - } - return v, nil -} - -// BoolsX is like Bools, but panics if an error occurs. -func (us *UserSelect) BoolsX(ctx context.Context) []bool { - v, err := us.Bools(ctx) - if err != nil { - panic(err) - } - return v -} - -// Bool returns a single bool from a selector. It is only allowed when selecting one field. -func (us *UserSelect) Bool(ctx context.Context) (_ bool, err error) { - var v []bool - if v, err = us.Bools(ctx); err != nil { - return - } - switch len(v) { - case 1: - return v[0], nil - case 0: - err = &NotFoundError{user.Label} - default: - err = fmt.Errorf("ent: UserSelect.Bools returned %d results when one was expected", len(v)) - } - return -} - -// BoolX is like Bool, but panics if an error occurs. 
-func (us *UserSelect) BoolX(ctx context.Context) bool { - v, err := us.Bool(ctx) - if err != nil { - panic(err) - } - return v -} - func (us *UserSelect) sqlScan(ctx context.Context, v interface{}) error { rows := &sql.Rows{} query, args := us.sql.Query() diff --git a/backend/ent/user_update.go b/backend/ent/user_update.go index 9a6a3cb..1cccc6f 100644 --- a/backend/ent/user_update.go +++ b/backend/ent/user_update.go @@ -1,4 +1,4 @@ -// Code generated by entc, DO NOT EDIT. +// Code generated by ent, DO NOT EDIT. package ent @@ -6,11 +6,14 @@ import ( "context" "errors" "fmt" + "time" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" + "github.com/google/uuid" "github.com/hay-kot/content/backend/ent/authtokens" + "github.com/hay-kot/content/backend/ent/group" "github.com/hay-kot/content/backend/ent/predicate" "github.com/hay-kot/content/backend/ent/user" ) @@ -28,6 +31,12 @@ func (uu *UserUpdate) Where(ps ...predicate.User) *UserUpdate { return uu } +// SetUpdatedAt sets the "updated_at" field. +func (uu *UserUpdate) SetUpdatedAt(t time.Time) *UserUpdate { + uu.mutation.SetUpdatedAt(t) + return uu +} + // SetName sets the "name" field. func (uu *UserUpdate) SetName(s string) *UserUpdate { uu.mutation.SetName(s) @@ -60,15 +69,26 @@ func (uu *UserUpdate) SetNillableIsSuperuser(b *bool) *UserUpdate { return uu } +// SetGroupID sets the "group" edge to the Group entity by ID. +func (uu *UserUpdate) SetGroupID(id uuid.UUID) *UserUpdate { + uu.mutation.SetGroupID(id) + return uu +} + +// SetGroup sets the "group" edge to the Group entity. +func (uu *UserUpdate) SetGroup(g *Group) *UserUpdate { + return uu.SetGroupID(g.ID) +} + // AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. -func (uu *UserUpdate) AddAuthTokenIDs(ids ...int) *UserUpdate { +func (uu *UserUpdate) AddAuthTokenIDs(ids ...uuid.UUID) *UserUpdate { uu.mutation.AddAuthTokenIDs(ids...) 
return uu } // AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. func (uu *UserUpdate) AddAuthTokens(a ...*AuthTokens) *UserUpdate { - ids := make([]int, len(a)) + ids := make([]uuid.UUID, len(a)) for i := range a { ids[i] = a[i].ID } @@ -80,6 +100,12 @@ func (uu *UserUpdate) Mutation() *UserMutation { return uu.mutation } +// ClearGroup clears the "group" edge to the Group entity. +func (uu *UserUpdate) ClearGroup() *UserUpdate { + uu.mutation.ClearGroup() + return uu +} + // ClearAuthTokens clears all "auth_tokens" edges to the AuthTokens entity. func (uu *UserUpdate) ClearAuthTokens() *UserUpdate { uu.mutation.ClearAuthTokens() @@ -87,14 +113,14 @@ func (uu *UserUpdate) ClearAuthTokens() *UserUpdate { } // RemoveAuthTokenIDs removes the "auth_tokens" edge to AuthTokens entities by IDs. -func (uu *UserUpdate) RemoveAuthTokenIDs(ids ...int) *UserUpdate { +func (uu *UserUpdate) RemoveAuthTokenIDs(ids ...uuid.UUID) *UserUpdate { uu.mutation.RemoveAuthTokenIDs(ids...) return uu } // RemoveAuthTokens removes "auth_tokens" edges to AuthTokens entities. func (uu *UserUpdate) RemoveAuthTokens(a ...*AuthTokens) *UserUpdate { - ids := make([]int, len(a)) + ids := make([]uuid.UUID, len(a)) for i := range a { ids[i] = a[i].ID } @@ -107,6 +133,7 @@ func (uu *UserUpdate) Save(ctx context.Context) (int, error) { err error affected int ) + uu.defaults() if len(uu.hooks) == 0 { if err = uu.check(); err != nil { return 0, err @@ -161,6 +188,14 @@ func (uu *UserUpdate) ExecX(ctx context.Context) { } } +// defaults sets the default values of the builder before save. +func (uu *UserUpdate) defaults() { + if _, ok := uu.mutation.UpdatedAt(); !ok { + v := user.UpdateDefaultUpdatedAt() + uu.mutation.SetUpdatedAt(v) + } +} + // check runs all checks and user-defined validators on the builder. 
func (uu *UserUpdate) check() error { if v, ok := uu.mutation.Name(); ok { @@ -178,6 +213,9 @@ func (uu *UserUpdate) check() error { return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} } } + if _, ok := uu.mutation.GroupID(); uu.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "User.group"`) + } return nil } @@ -199,6 +237,13 @@ func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { } } } + if value, ok := uu.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: user.FieldUpdatedAt, + }) + } if value, ok := uu.mutation.Name(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, @@ -227,6 +272,41 @@ func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { Column: user.FieldIsSuperuser, }) } + if uu.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: user.GroupTable, + Columns: []string{user.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uu.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: user.GroupTable, + Columns: []string{user.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } if uu.mutation.AuthTokensCleared() { edge := &sqlgraph.EdgeSpec{ Rel: sqlgraph.O2M, @@ -236,7 +316,7 @@ func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { Bidi: false, 
Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -252,7 +332,7 @@ func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -271,7 +351,7 @@ func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -285,7 +365,7 @@ func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { if _, ok := err.(*sqlgraph.NotFoundError); ok { err = &NotFoundError{user.Label} } else if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } return 0, err } @@ -300,6 +380,12 @@ type UserUpdateOne struct { mutation *UserMutation } +// SetUpdatedAt sets the "updated_at" field. +func (uuo *UserUpdateOne) SetUpdatedAt(t time.Time) *UserUpdateOne { + uuo.mutation.SetUpdatedAt(t) + return uuo +} + // SetName sets the "name" field. func (uuo *UserUpdateOne) SetName(s string) *UserUpdateOne { uuo.mutation.SetName(s) @@ -332,15 +418,26 @@ func (uuo *UserUpdateOne) SetNillableIsSuperuser(b *bool) *UserUpdateOne { return uuo } +// SetGroupID sets the "group" edge to the Group entity by ID. +func (uuo *UserUpdateOne) SetGroupID(id uuid.UUID) *UserUpdateOne { + uuo.mutation.SetGroupID(id) + return uuo +} + +// SetGroup sets the "group" edge to the Group entity. +func (uuo *UserUpdateOne) SetGroup(g *Group) *UserUpdateOne { + return uuo.SetGroupID(g.ID) +} + // AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. 
-func (uuo *UserUpdateOne) AddAuthTokenIDs(ids ...int) *UserUpdateOne { +func (uuo *UserUpdateOne) AddAuthTokenIDs(ids ...uuid.UUID) *UserUpdateOne { uuo.mutation.AddAuthTokenIDs(ids...) return uuo } // AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. func (uuo *UserUpdateOne) AddAuthTokens(a ...*AuthTokens) *UserUpdateOne { - ids := make([]int, len(a)) + ids := make([]uuid.UUID, len(a)) for i := range a { ids[i] = a[i].ID } @@ -352,6 +449,12 @@ func (uuo *UserUpdateOne) Mutation() *UserMutation { return uuo.mutation } +// ClearGroup clears the "group" edge to the Group entity. +func (uuo *UserUpdateOne) ClearGroup() *UserUpdateOne { + uuo.mutation.ClearGroup() + return uuo +} + // ClearAuthTokens clears all "auth_tokens" edges to the AuthTokens entity. func (uuo *UserUpdateOne) ClearAuthTokens() *UserUpdateOne { uuo.mutation.ClearAuthTokens() @@ -359,14 +462,14 @@ func (uuo *UserUpdateOne) ClearAuthTokens() *UserUpdateOne { } // RemoveAuthTokenIDs removes the "auth_tokens" edge to AuthTokens entities by IDs. -func (uuo *UserUpdateOne) RemoveAuthTokenIDs(ids ...int) *UserUpdateOne { +func (uuo *UserUpdateOne) RemoveAuthTokenIDs(ids ...uuid.UUID) *UserUpdateOne { uuo.mutation.RemoveAuthTokenIDs(ids...) return uuo } // RemoveAuthTokens removes "auth_tokens" edges to AuthTokens entities. 
func (uuo *UserUpdateOne) RemoveAuthTokens(a ...*AuthTokens) *UserUpdateOne { - ids := make([]int, len(a)) + ids := make([]uuid.UUID, len(a)) for i := range a { ids[i] = a[i].ID } @@ -386,6 +489,7 @@ func (uuo *UserUpdateOne) Save(ctx context.Context) (*User, error) { err error node *User ) + uuo.defaults() if len(uuo.hooks) == 0 { if err = uuo.check(); err != nil { return nil, err @@ -411,9 +515,15 @@ func (uuo *UserUpdateOne) Save(ctx context.Context) (*User, error) { } mut = uuo.hooks[i](mut) } - if _, err := mut.Mutate(ctx, uuo.mutation); err != nil { + v, err := mut.Mutate(ctx, uuo.mutation) + if err != nil { return nil, err } + nv, ok := v.(*User) + if !ok { + return nil, fmt.Errorf("unexpected node type %T returned from UserMutation", v) + } + node = nv } return node, err } @@ -440,6 +550,14 @@ func (uuo *UserUpdateOne) ExecX(ctx context.Context) { } } +// defaults sets the default values of the builder before save. +func (uuo *UserUpdateOne) defaults() { + if _, ok := uuo.mutation.UpdatedAt(); !ok { + v := user.UpdateDefaultUpdatedAt() + uuo.mutation.SetUpdatedAt(v) + } +} + // check runs all checks and user-defined validators on the builder. 
func (uuo *UserUpdateOne) check() error { if v, ok := uuo.mutation.Name(); ok { @@ -457,6 +575,9 @@ func (uuo *UserUpdateOne) check() error { return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} } } + if _, ok := uuo.mutation.GroupID(); uuo.mutation.GroupCleared() && !ok { + return errors.New(`ent: clearing a required unique edge "User.group"`) + } return nil } @@ -495,6 +616,13 @@ func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) } } } + if value, ok := uuo.mutation.UpdatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: user.FieldUpdatedAt, + }) + } if value, ok := uuo.mutation.Name(); ok { _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ Type: field.TypeString, @@ -523,6 +651,41 @@ func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) Column: user.FieldIsSuperuser, }) } + if uuo.mutation.GroupCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: user.GroupTable, + Columns: []string{user.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uuo.mutation.GroupIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: user.GroupTable, + Columns: []string{user.GroupColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: group.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } if uuo.mutation.AuthTokensCleared() { edge := &sqlgraph.EdgeSpec{ Rel: sqlgraph.O2M, @@ -532,7 +695,7 @@ func (uuo *UserUpdateOne) sqlSave(ctx context.Context) 
(_node *User, err error) Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -548,7 +711,7 @@ func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -567,7 +730,7 @@ func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: &sqlgraph.FieldSpec{ - Type: field.TypeInt, + Type: field.TypeUUID, Column: authtokens.FieldID, }, }, @@ -584,7 +747,7 @@ func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) if _, ok := err.(*sqlgraph.NotFoundError); ok { err = &NotFoundError{user.Label} } else if sqlgraph.IsConstraintError(err) { - err = &ConstraintError{err.Error(), err} + err = &ConstraintError{msg: err.Error(), wrap: err} } return nil, err } From b83505104a8bd523fdb273484b5293b274a12c3c Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 10:05:11 -0800 Subject: [PATCH 005/530] align types with new db schema --- backend/app/api/docs/docs.go | 489 +++++++++++++++++- backend/app/api/docs/swagger.json | 489 +++++++++++++++++- backend/app/api/docs/swagger.yaml | 362 ++++++++++++- backend/app/api/main.go | 1 - backend/app/api/middleware.go | 2 +- backend/app/api/routes.go | 42 +- backend/app/api/seed.go | 11 +- backend/app/api/v1/main_test.go | 5 +- backend/app/api/v1/v1_ctrl_admin.go | 8 +- backend/app/api/v1/v1_ctrl_admin_test.go | 10 +- backend/app/api/v1/v1_ctrl_user.go | 33 +- backend/app/generator/main.go | 19 +- backend/config.template.yml | 1 + backend/go.sum | 4 + backend/internal/config/conf_seed.go | 1 + backend/internal/mapper/users_automapper.go | 27 - backend/internal/repo/repo_group.go | 29 ++ .../repo/{token_ent.go => repo_tokens.go} 
| 7 +- ...{token_ent_test.go => repo_tokens_test.go} | 0 .../repo/{users_ent.go => repo_users.go} | 62 +-- .../{users_ent_test.go => repo_users_test.go} | 3 +- backend/internal/repo/repos_all.go | 6 +- backend/internal/repo/token_interface.go | 20 - backend/internal/repo/users_interface.go | 27 - backend/internal/services/contexts.go | 8 +- backend/internal/services/contexts_test.go | 4 +- backend/internal/services/service_admin.go | 13 +- backend/internal/services/service_user.go | 31 +- backend/internal/types/users_types.go | 27 +- backend/internal/types/users_types_test.go | 13 - 30 files changed, 1491 insertions(+), 263 deletions(-) delete mode 100644 backend/internal/mapper/users_automapper.go create mode 100644 backend/internal/repo/repo_group.go rename backend/internal/repo/{token_ent.go => repo_tokens.go} (90%) rename backend/internal/repo/{token_ent_test.go => repo_tokens_test.go} (100%) rename backend/internal/repo/{users_ent.go => repo_users.go} (63%) rename backend/internal/repo/{users_ent_test.go => repo_users_test.go} (97%) delete mode 100644 backend/internal/repo/token_interface.go delete mode 100644 backend/internal/repo/users_interface.go diff --git a/backend/app/api/docs/docs.go b/backend/app/api/docs/docs.go index 9a371a6..a6d2d37 100644 --- a/backend/app/api/docs/docs.go +++ b/backend/app/api/docs/docs.go @@ -80,7 +80,7 @@ const docTemplate = `{ "item": { "type": "array", "items": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -126,7 +126,7 @@ const docTemplate = `{ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -171,7 +171,7 @@ const docTemplate = `{ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -223,7 +223,7 @@ const docTemplate = `{ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": 
"#/definitions/ent.User" } } } @@ -337,6 +337,48 @@ const docTemplate = `{ } } }, + "/v1/users/register": { + "post": { + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Get the current user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserRegistration" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/ent.User" + } + } + } + ] + } + } + } + } + }, "/v1/users/self": { "get": { "security": [ @@ -363,7 +405,7 @@ const docTemplate = `{ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -441,6 +483,418 @@ const docTemplate = `{ } }, "definitions": { + "ent.AuthTokens": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthTokensQuery when eager-loading is set.", + "$ref": "#/definitions/ent.AuthTokensEdges" + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AuthTokensEdges": { + "type": "object", + "properties": { + "user": { + "description": "User holds the value of the user edge.", + "$ref": "#/definitions/ent.User" + } + } + }, + "ent.Group": { + 
"type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "currency": { + "description": "Currency holds the value of the \"currency\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupQuery when eager-loading is set.", + "$ref": "#/definitions/ent.GroupEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.GroupEdges": { + "type": "object", + "properties": { + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "labels": { + "description": "Labels holds the value of the labels edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "locations": { + "description": "Locations holds the value of the locations edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "users": { + "description": "Users holds the value of the users edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.User" + } + } + } + }, + "ent.Item": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemQuery when eager-loading is set.", + "$ref": "#/definitions/ent.ItemEdges" + }, + "id": { + "description": 
"ID of the ent.", + "type": "string" + }, + "manufacturer": { + "description": "Manufacturer holds the value of the \"manufacturer\" field.", + "type": "string" + }, + "model_number": { + "description": "ModelNumber holds the value of the \"model_number\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "notes": { + "description": "Notes holds the value of the \"notes\" field.", + "type": "string" + }, + "purchase_from": { + "description": "PurchaseFrom holds the value of the \"purchase_from\" field.", + "type": "string" + }, + "purchase_price": { + "description": "PurchasePrice holds the value of the \"purchase_price\" field.", + "type": "number" + }, + "purchase_receipt_id": { + "description": "PurchaseReceiptID holds the value of the \"purchase_receipt_id\" field.", + "type": "string" + }, + "purchase_time": { + "description": "PurchaseTime holds the value of the \"purchase_time\" field.", + "type": "string" + }, + "serial_number": { + "description": "SerialNumber holds the value of the \"serial_number\" field.", + "type": "string" + }, + "sold_notes": { + "description": "SoldNotes holds the value of the \"sold_notes\" field.", + "type": "string" + }, + "sold_price": { + "description": "SoldPrice holds the value of the \"sold_price\" field.", + "type": "number" + }, + "sold_receipt_id": { + "description": "SoldReceiptID holds the value of the \"sold_receipt_id\" field.", + "type": "string" + }, + "sold_time": { + "description": "SoldTime holds the value of the \"sold_time\" field.", + "type": "string" + }, + "sold_to": { + "description": "SoldTo holds the value of the \"sold_to\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemEdges": { + "type": "object", + "properties": { + "fields": { + "description": "Fields holds the value of the fields edge.", + "type": 
"array", + "items": { + "$ref": "#/definitions/ent.ItemField" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + }, + "label": { + "description": "Label holds the value of the label edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "location": { + "description": "Location holds the value of the location edge.", + "$ref": "#/definitions/ent.Location" + } + } + }, + "ent.ItemField": { + "type": "object", + "properties": { + "boolean_value": { + "description": "BooleanValue holds the value of the \"boolean_value\" field.", + "type": "boolean" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemFieldQuery when eager-loading is set.", + "$ref": "#/definitions/ent.ItemFieldEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "number_value": { + "description": "NumberValue holds the value of the \"number_value\" field.", + "type": "integer" + }, + "text_value": { + "description": "TextValue holds the value of the \"text_value\" field.", + "type": "string" + }, + "time_value": { + "description": "TimeValue holds the value of the \"time_value\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemFieldEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the 
item edge.", + "$ref": "#/definitions/ent.Item" + } + } + }, + "ent.Label": { + "type": "object", + "properties": { + "color": { + "description": "Color holds the value of the \"color\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LabelQuery when eager-loading is set.", + "$ref": "#/definitions/ent.LabelEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LabelEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.Location": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LocationQuery when eager-loading is set.", + "$ref": "#/definitions/ent.LocationEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + 
"updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LocationEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.User": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the UserQuery when eager-loading is set.", + "$ref": "#/definitions/ent.UserEdges" + }, + "email": { + "description": "Email holds the value of the \"email\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_superuser": { + "description": "IsSuperuser holds the value of the \"is_superuser\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.UserEdges": { + "type": "object", + "properties": { + "auth_tokens": { + "description": "AuthTokens holds the value of the auth_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.AuthTokens" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + } + } + }, "server.Result": { "type": "object", "properties": { @@ -491,6 +945,9 @@ const docTemplate = `{ "email": { "type": "string" }, + "groupID": { + "type": "string" + }, "isSuperuser": { "type": "boolean" }, @@ -502,20 +959,28 @@ const docTemplate = `{ } } }, - "types.UserOut": { 
+ "types.UserIn": { "type": "object", "properties": { "email": { "type": "string" }, - "id": { - "type": "string" - }, - "isSuperuser": { - "type": "boolean" - }, "name": { "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "types.UserRegistration": { + "type": "object", + "properties": { + "groupName": { + "type": "string" + }, + "user": { + "$ref": "#/definitions/types.UserIn" } } }, diff --git a/backend/app/api/docs/swagger.json b/backend/app/api/docs/swagger.json index 51932ec..3124ca5 100644 --- a/backend/app/api/docs/swagger.json +++ b/backend/app/api/docs/swagger.json @@ -72,7 +72,7 @@ "item": { "type": "array", "items": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -118,7 +118,7 @@ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -163,7 +163,7 @@ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -215,7 +215,7 @@ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -329,6 +329,48 @@ } } }, + "/v1/users/register": { + "post": { + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Get the current user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserRegistration" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/ent.User" + } + } + } + ] + } + } + } + } + }, "/v1/users/self": { "get": { "security": [ @@ -355,7 +397,7 @@ "type": "object", "properties": { "item": { - "$ref": "#/definitions/types.UserOut" + "$ref": "#/definitions/ent.User" } } } @@ -433,6 +475,418 @@ } }, 
"definitions": { + "ent.AuthTokens": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthTokensQuery when eager-loading is set.", + "$ref": "#/definitions/ent.AuthTokensEdges" + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AuthTokensEdges": { + "type": "object", + "properties": { + "user": { + "description": "User holds the value of the user edge.", + "$ref": "#/definitions/ent.User" + } + } + }, + "ent.Group": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "currency": { + "description": "Currency holds the value of the \"currency\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupQuery when eager-loading is set.", + "$ref": "#/definitions/ent.GroupEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.GroupEdges": { + "type": "object", + "properties": { + "items": { + "description": "Items holds the value of the items edge.", + "type": 
"array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "labels": { + "description": "Labels holds the value of the labels edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "locations": { + "description": "Locations holds the value of the locations edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "users": { + "description": "Users holds the value of the users edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.User" + } + } + } + }, + "ent.Item": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemQuery when eager-loading is set.", + "$ref": "#/definitions/ent.ItemEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "manufacturer": { + "description": "Manufacturer holds the value of the \"manufacturer\" field.", + "type": "string" + }, + "model_number": { + "description": "ModelNumber holds the value of the \"model_number\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "notes": { + "description": "Notes holds the value of the \"notes\" field.", + "type": "string" + }, + "purchase_from": { + "description": "PurchaseFrom holds the value of the \"purchase_from\" field.", + "type": "string" + }, + "purchase_price": { + "description": "PurchasePrice holds the value of the \"purchase_price\" field.", + "type": "number" + }, + "purchase_receipt_id": { + "description": "PurchaseReceiptID holds the value of the \"purchase_receipt_id\" field.", + "type": "string" + }, + "purchase_time": { + 
"description": "PurchaseTime holds the value of the \"purchase_time\" field.", + "type": "string" + }, + "serial_number": { + "description": "SerialNumber holds the value of the \"serial_number\" field.", + "type": "string" + }, + "sold_notes": { + "description": "SoldNotes holds the value of the \"sold_notes\" field.", + "type": "string" + }, + "sold_price": { + "description": "SoldPrice holds the value of the \"sold_price\" field.", + "type": "number" + }, + "sold_receipt_id": { + "description": "SoldReceiptID holds the value of the \"sold_receipt_id\" field.", + "type": "string" + }, + "sold_time": { + "description": "SoldTime holds the value of the \"sold_time\" field.", + "type": "string" + }, + "sold_to": { + "description": "SoldTo holds the value of the \"sold_to\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemEdges": { + "type": "object", + "properties": { + "fields": { + "description": "Fields holds the value of the fields edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.ItemField" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + }, + "label": { + "description": "Label holds the value of the label edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "location": { + "description": "Location holds the value of the location edge.", + "$ref": "#/definitions/ent.Location" + } + } + }, + "ent.ItemField": { + "type": "object", + "properties": { + "boolean_value": { + "description": "BooleanValue holds the value of the \"boolean_value\" field.", + "type": "boolean" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + 
"description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemFieldQuery when eager-loading is set.", + "$ref": "#/definitions/ent.ItemFieldEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "number_value": { + "description": "NumberValue holds the value of the \"number_value\" field.", + "type": "integer" + }, + "text_value": { + "description": "TextValue holds the value of the \"text_value\" field.", + "type": "string" + }, + "time_value": { + "description": "TimeValue holds the value of the \"time_value\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemFieldEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "$ref": "#/definitions/ent.Item" + } + } + }, + "ent.Label": { + "type": "object", + "properties": { + "color": { + "description": "Color holds the value of the \"color\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LabelQuery when eager-loading is set.", + "$ref": "#/definitions/ent.LabelEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the 
\"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LabelEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.Location": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LocationQuery when eager-loading is set.", + "$ref": "#/definitions/ent.LocationEdges" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LocationEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.User": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the UserQuery when eager-loading is set.", + "$ref": "#/definitions/ent.UserEdges" + }, + "email": { + "description": "Email holds the value of the \"email\" 
field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_superuser": { + "description": "IsSuperuser holds the value of the \"is_superuser\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.UserEdges": { + "type": "object", + "properties": { + "auth_tokens": { + "description": "AuthTokens holds the value of the auth_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.AuthTokens" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "$ref": "#/definitions/ent.Group" + } + } + }, "server.Result": { "type": "object", "properties": { @@ -483,6 +937,9 @@ "email": { "type": "string" }, + "groupID": { + "type": "string" + }, "isSuperuser": { "type": "boolean" }, @@ -494,20 +951,28 @@ } } }, - "types.UserOut": { + "types.UserIn": { "type": "object", "properties": { "email": { "type": "string" }, - "id": { - "type": "string" - }, - "isSuperuser": { - "type": "boolean" - }, "name": { "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "types.UserRegistration": { + "type": "object", + "properties": { + "groupName": { + "type": "string" + }, + "user": { + "$ref": "#/definitions/types.UserIn" } } }, diff --git a/backend/app/api/docs/swagger.yaml b/backend/app/api/docs/swagger.yaml index b268aa0..5845631 100644 --- a/backend/app/api/docs/swagger.yaml +++ b/backend/app/api/docs/swagger.yaml @@ -1,5 +1,316 @@ basePath: /api definitions: + ent.AuthTokens: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + $ref: '#/definitions/ent.AuthTokensEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. 
+ The values are being populated by the AuthTokensQuery when eager-loading is set. + expires_at: + description: ExpiresAt holds the value of the "expires_at" field. + type: string + id: + description: ID of the ent. + type: string + token: + description: Token holds the value of the "token" field. + items: + type: integer + type: array + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.AuthTokensEdges: + properties: + user: + $ref: '#/definitions/ent.User' + description: User holds the value of the user edge. + type: object + ent.Group: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + currency: + description: Currency holds the value of the "currency" field. + type: string + edges: + $ref: '#/definitions/ent.GroupEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the GroupQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.GroupEdges: + properties: + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + labels: + description: Labels holds the value of the labels edge. + items: + $ref: '#/definitions/ent.Label' + type: array + locations: + description: Locations holds the value of the locations edge. + items: + $ref: '#/definitions/ent.Location' + type: array + users: + description: Users holds the value of the users edge. + items: + $ref: '#/definitions/ent.User' + type: array + type: object + ent.Item: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. 
+ type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + $ref: '#/definitions/ent.ItemEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the ItemQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + manufacturer: + description: Manufacturer holds the value of the "manufacturer" field. + type: string + model_number: + description: ModelNumber holds the value of the "model_number" field. + type: string + name: + description: Name holds the value of the "name" field. + type: string + notes: + description: Notes holds the value of the "notes" field. + type: string + purchase_from: + description: PurchaseFrom holds the value of the "purchase_from" field. + type: string + purchase_price: + description: PurchasePrice holds the value of the "purchase_price" field. + type: number + purchase_receipt_id: + description: PurchaseReceiptID holds the value of the "purchase_receipt_id" + field. + type: string + purchase_time: + description: PurchaseTime holds the value of the "purchase_time" field. + type: string + serial_number: + description: SerialNumber holds the value of the "serial_number" field. + type: string + sold_notes: + description: SoldNotes holds the value of the "sold_notes" field. + type: string + sold_price: + description: SoldPrice holds the value of the "sold_price" field. + type: number + sold_receipt_id: + description: SoldReceiptID holds the value of the "sold_receipt_id" field. + type: string + sold_time: + description: SoldTime holds the value of the "sold_time" field. + type: string + sold_to: + description: SoldTo holds the value of the "sold_to" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.ItemEdges: + properties: + fields: + description: Fields holds the value of the fields edge. 
+ items: + $ref: '#/definitions/ent.ItemField' + type: array + group: + $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + label: + description: Label holds the value of the label edge. + items: + $ref: '#/definitions/ent.Label' + type: array + location: + $ref: '#/definitions/ent.Location' + description: Location holds the value of the location edge. + type: object + ent.ItemField: + properties: + boolean_value: + description: BooleanValue holds the value of the "boolean_value" field. + type: boolean + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + $ref: '#/definitions/ent.ItemFieldEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the ItemFieldQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + number_value: + description: NumberValue holds the value of the "number_value" field. + type: integer + text_value: + description: TextValue holds the value of the "text_value" field. + type: string + time_value: + description: TimeValue holds the value of the "time_value" field. + type: string + type: + description: Type holds the value of the "type" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.ItemFieldEdges: + properties: + item: + $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + type: object + ent.Label: + properties: + color: + description: Color holds the value of the "color" field. + type: string + created_at: + description: CreatedAt holds the value of the "created_at" field. 
+ type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + $ref: '#/definitions/ent.LabelEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the LabelQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.LabelEdges: + properties: + group: + $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + type: object + ent.Location: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + $ref: '#/definitions/ent.LocationEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the LocationQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.LocationEdges: + properties: + group: + $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + type: object + ent.User: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. 
+ type: string + edges: + $ref: '#/definitions/ent.UserEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the UserQuery when eager-loading is set. + email: + description: Email holds the value of the "email" field. + type: string + id: + description: ID of the ent. + type: string + is_superuser: + description: IsSuperuser holds the value of the "is_superuser" field. + type: boolean + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.UserEdges: + properties: + auth_tokens: + description: AuthTokens holds the value of the auth_tokens edge. + items: + $ref: '#/definitions/ent.AuthTokens' + type: array + group: + $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + type: object server.Result: properties: details: {} @@ -33,6 +344,8 @@ definitions: properties: email: type: string + groupID: + type: string isSuperuser: type: boolean name: @@ -40,16 +353,21 @@ definitions: password: type: string type: object - types.UserOut: + types.UserIn: properties: email: type: string - id: - type: string - isSuperuser: - type: boolean name: type: string + password: + type: string + type: object + types.UserRegistration: + properties: + groupName: + type: string + user: + $ref: '#/definitions/types.UserIn' type: object types.UserUpdate: properties: @@ -99,7 +417,7 @@ paths: - properties: item: items: - $ref: '#/definitions/types.UserOut' + $ref: '#/definitions/ent.User' type: array type: object security: @@ -125,7 +443,7 @@ paths: - $ref: '#/definitions/server.Result' - properties: item: - $ref: '#/definitions/types.UserOut' + $ref: '#/definitions/ent.User' type: object security: - Bearer: [] @@ -167,7 +485,7 @@ paths: - $ref: '#/definitions/server.Result' - properties: item: - $ref: '#/definitions/types.UserOut' + $ref: 
'#/definitions/ent.User' type: object security: - Bearer: [] @@ -197,7 +515,7 @@ paths: - $ref: '#/definitions/server.Result' - properties: item: - $ref: '#/definitions/types.UserOut' + $ref: '#/definitions/ent.User' type: object security: - Bearer: [] @@ -253,6 +571,30 @@ paths: summary: User Token Refresh tags: - Authentication + /v1/users/register: + post: + parameters: + - description: User Data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/types.UserRegistration' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/ent.User' + type: object + summary: Get the current user + tags: + - User /v1/users/self: get: produces: @@ -265,7 +607,7 @@ paths: - $ref: '#/definitions/server.Result' - properties: item: - $ref: '#/definitions/types.UserOut' + $ref: '#/definitions/ent.User' type: object security: - Bearer: [] diff --git a/backend/app/api/main.go b/backend/app/api/main.go index cfd59fb..29f0c22 100644 --- a/backend/app/api/main.go +++ b/backend/app/api/main.go @@ -97,7 +97,6 @@ func run(cfg *config.Config) error { routes := app.newRouter(app.repos) app.LogRoutes(routes) - app.EnsureAdministrator() app.SeedDatabase(app.repos) app.logger.Info("Starting HTTP Server", logger.Props{ diff --git a/backend/app/api/middleware.go b/backend/app/api/middleware.go index 3e2da68..d10ab21 100644 --- a/backend/app/api/middleware.go +++ b/backend/app/api/middleware.go @@ -64,7 +64,7 @@ func (a *app) mwAuthToken(next http.Handler) http.Handler { return } - r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken)) + r = r.WithContext(services.SetUserCtx(r.Context(), usr, requestToken)) next.ServeHTTP(w, r) }) diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go index b784c26..0fed8b2 100644 --- a/backend/app/api/routes.go +++ b/backend/app/api/routes.go @@ -36,26 +36,30 @@ func (a *app) 
newRouter(repos *repo.AllRepos) *chi.Mux { // ========================================================================= // API Version 1 - v1Base := v1.BaseUrlFunc(prefix) - v1Handlers := v1.NewControllerV1(a.logger, a.services) - r.Post(v1Base("/users/login"), v1Handlers.HandleAuthLogin()) - r.Group(func(r chi.Router) { - r.Use(a.mwAuthToken) - r.Get(v1Base("/users/self"), v1Handlers.HandleUserSelf()) - r.Put(v1Base("/users/self"), v1Handlers.HandleUserUpdate()) - r.Put(v1Base("/users/self/password"), v1Handlers.HandleUserUpdatePassword()) - r.Post(v1Base("/users/logout"), v1Handlers.HandleAuthLogout()) - r.Get(v1Base("/users/refresh"), v1Handlers.HandleAuthRefresh()) - }) - r.Group(func(r chi.Router) { - r.Use(a.mwAdminOnly) - r.Get(v1Base("/admin/users"), v1Handlers.HandleAdminUserGetAll()) - r.Post(v1Base("/admin/users"), v1Handlers.HandleAdminUserCreate()) - r.Get(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserGet()) - r.Put(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserUpdate()) - r.Delete(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserDelete()) - }) + v1Base := v1.BaseUrlFunc(prefix) + { + v1Handlers := v1.NewControllerV1(a.logger, a.services) + r.Post(v1Base("/users/register"), v1Handlers.HandleUserRegistration()) + r.Post(v1Base("/users/login"), v1Handlers.HandleAuthLogin()) + r.Group(func(r chi.Router) { + r.Use(a.mwAuthToken) + r.Get(v1Base("/users/self"), v1Handlers.HandleUserSelf()) + r.Put(v1Base("/users/self"), v1Handlers.HandleUserUpdate()) + r.Put(v1Base("/users/self/password"), v1Handlers.HandleUserUpdatePassword()) + r.Post(v1Base("/users/logout"), v1Handlers.HandleAuthLogout()) + r.Get(v1Base("/users/refresh"), v1Handlers.HandleAuthRefresh()) + }) + + r.Group(func(r chi.Router) { + r.Use(a.mwAdminOnly) + r.Get(v1Base("/admin/users"), v1Handlers.HandleAdminUserGetAll()) + r.Post(v1Base("/admin/users"), v1Handlers.HandleAdminUserCreate()) + r.Get(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserGet()) + 
r.Put(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserUpdate()) + r.Delete(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserDelete()) + }) + } return r } diff --git a/backend/app/api/seed.go b/backend/app/api/seed.go index 7d66774..42c3600 100644 --- a/backend/app/api/seed.go +++ b/backend/app/api/seed.go @@ -11,6 +11,7 @@ import ( ) const ( + DefaultGroup = "Default" DefaultName = "Admin" DefaultEmail = "admin@admin.com" DefaultPassword = "admin" @@ -22,15 +23,13 @@ func (a *app) EnsureAdministrator() { superusers, err := a.repos.Users.GetSuperusers(context.Background()) if err != nil { - a.logger.Error(err, nil) + a.logger.Fatal(err, nil) } - if len(superusers) > 0 { return } pw, _ := hasher.HashPassword(DefaultPassword) - newSuperUser := types.UserCreate{ Name: DefaultName, Email: DefaultEmail, @@ -56,6 +55,11 @@ func (a *app) SeedDatabase(repos *repo.AllRepos) { return } + group, err := repos.Groups.Create(context.Background(), DefaultGroup) + if err != nil { + a.logger.Fatal(err, nil) + } + for _, user := range a.conf.Seed.Users { // Check if User Exists @@ -82,6 +86,7 @@ func (a *app) SeedDatabase(repos *repo.AllRepos) { Email: user.Email, IsSuperuser: user.IsSuperuser, Password: hashedPw, + GroupID: group.ID, }) if err != nil { diff --git a/backend/app/api/v1/main_test.go b/backend/app/api/v1/main_test.go index 3713dee..43c7132 100644 --- a/backend/app/api/v1/main_test.go +++ b/backend/app/api/v1/main_test.go @@ -4,13 +4,14 @@ import ( "context" "testing" + "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/internal/mocks" "github.com/hay-kot/content/backend/internal/mocks/factories" "github.com/hay-kot/content/backend/internal/types" ) var mockHandler = &V1Controller{} -var users = []types.UserOut{} +var users = []*ent.User{} func userPool() func() { create := []types.UserCreate{ @@ -20,7 +21,7 @@ func userPool() func() { factories.UserFactory(), } - userOut := []types.UserOut{} + userOut := []*ent.User{} for _, user 
:= range create { usrOut, _ := mockHandler.svc.Admin.Create(context.Background(), user) diff --git a/backend/app/api/v1/v1_ctrl_admin.go b/backend/app/api/v1/v1_ctrl_admin.go index 4961c6b..a67a6f2 100644 --- a/backend/app/api/v1/v1_ctrl_admin.go +++ b/backend/app/api/v1/v1_ctrl_admin.go @@ -17,7 +17,7 @@ import ( // @Summary Gets all users from the database // @Tags Admin: Users // @Produce json -// @Success 200 {object} server.Result{item=[]types.UserOut} +// @Success 200 {object} server.Result{item=[]ent.User} // @Router /v1/admin/users [get] // @Security Bearer func (ctrl *V1Controller) HandleAdminUserGetAll() http.HandlerFunc { @@ -38,7 +38,7 @@ func (ctrl *V1Controller) HandleAdminUserGetAll() http.HandlerFunc { // @Tags Admin: Users // @Produce json // @Param id path string true "User ID" -// @Success 200 {object} server.Result{item=types.UserOut} +// @Success 200 {object} server.Result{item=ent.User} // @Router /v1/admin/users/{id} [get] // @Security Bearer func (ctrl *V1Controller) HandleAdminUserGet() http.HandlerFunc { @@ -71,7 +71,7 @@ func (ctrl *V1Controller) HandleAdminUserGet() http.HandlerFunc { // @Tags Admin: Users // @Produce json // @Param payload body types.UserCreate true "User Data" -// @Success 200 {object} server.Result{item=types.UserOut} +// @Success 200 {object} server.Result{item=ent.User} // @Router /v1/admin/users [POST] // @Security Bearer func (ctrl *V1Controller) HandleAdminUserCreate() http.HandlerFunc { @@ -129,7 +129,7 @@ func (ctrl *V1Controller) HandleAdminUserCreate() http.HandlerFunc { // @Param id path string true "User ID" // @Param payload body types.UserUpdate true "User Data" // @Produce json -// @Success 200 {object} server.Result{item=types.UserOut} +// @Success 200 {object} server.Result{item=ent.User} // @Router /v1/admin/users/{id} [PUT] // @Security Bearer func (ctrl *V1Controller) HandleAdminUserUpdate() http.HandlerFunc { diff --git a/backend/app/api/v1/v1_ctrl_admin_test.go 
b/backend/app/api/v1/v1_ctrl_admin_test.go index 6c0d8e9..b85cfec 100644 --- a/backend/app/api/v1/v1_ctrl_admin_test.go +++ b/backend/app/api/v1/v1_ctrl_admin_test.go @@ -9,9 +9,9 @@ import ( "net/http/httptest" "testing" + "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/internal/mocks/chimocker" "github.com/hay-kot/content/backend/internal/mocks/factories" - "github.com/hay-kot/content/backend/internal/types" "github.com/hay-kot/content/backend/pkgs/server" "github.com/stretchr/testify/assert" ) @@ -23,11 +23,11 @@ const ( ) type usersResponse struct { - Users []types.UserOut `json:"item"` + Users []*ent.User `json:"item"` } type userResponse struct { - User types.UserOut `json:"item"` + User *ent.User `json:"item"` } func Test_HandleAdminUserGetAll_Success(t *testing.T) { @@ -37,7 +37,7 @@ func Test_HandleAdminUserGetAll_Success(t *testing.T) { mockHandler.HandleAdminUserGetAll()(r, req) response := usersResponse{ - Users: []types.UserOut{}, + Users: []*ent.User{}, } _ = json.Unmarshal(r.Body.Bytes(), &response) @@ -68,7 +68,7 @@ func Test_HandleAdminUserGet_Success(t *testing.T) { assert.Equal(t, http.StatusOK, res.Code) response := userResponse{ - User: types.UserOut{}, + User: &ent.User{}, } _ = json.Unmarshal(res.Body.Bytes(), &response) diff --git a/backend/app/api/v1/v1_ctrl_user.go b/backend/app/api/v1/v1_ctrl_user.go index 68c6be2..a3371c2 100644 --- a/backend/app/api/v1/v1_ctrl_user.go +++ b/backend/app/api/v1/v1_ctrl_user.go @@ -4,24 +4,53 @@ import ( "errors" "net/http" + "github.com/google/uuid" "github.com/hay-kot/content/backend/internal/services" "github.com/hay-kot/content/backend/internal/types" "github.com/hay-kot/content/backend/pkgs/logger" "github.com/hay-kot/content/backend/pkgs/server" ) +// HandleUserSelf godoc +// @Summary Get the current user +// @Tags User +// @Produce json +// @Param payload body types.UserRegistration true "User Data" +// @Success 200 {object} server.Result{item=ent.User} +// @Router 
/v1/users/register [Post] +func (ctrl *V1Controller) HandleUserRegistration() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + regData := types.UserRegistration{} + + if err := server.Decode(r, ®Data); err != nil { + ctrl.log.Error(err, nil) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + usr, err := ctrl.svc.User.RegisterUser(r.Context(), regData) + if err != nil { + ctrl.log.Error(err, nil) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + _ = server.Respond(w, http.StatusOK, server.Wrap(usr)) + } +} + // HandleUserSelf godoc // @Summary Get the current user // @Tags User // @Produce json -// @Success 200 {object} server.Result{item=types.UserOut} +// @Success 200 {object} server.Result{item=ent.User} // @Router /v1/users/self [GET] // @Security Bearer func (ctrl *V1Controller) HandleUserSelf() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { token := services.UseTokenCtx(r.Context()) usr, err := ctrl.svc.User.GetSelf(r.Context(), token) - if usr.IsNull() || err != nil { + if usr.ID == uuid.Nil || err != nil { ctrl.log.Error(errors.New("no user within request context"), nil) server.RespondInternalServerError(w) return diff --git a/backend/app/generator/main.go b/backend/app/generator/main.go index 3cc45a6..8caf35c 100644 --- a/backend/app/generator/main.go +++ b/backend/app/generator/main.go @@ -4,7 +4,6 @@ import ( "time" "github.com/google/uuid" - "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/internal/types" "github.com/hay-kot/content/backend/pkgs/automapper" "github.com/tkrajina/typescriptify-golang-structs/typescriptify" @@ -12,22 +11,7 @@ import ( // generateMappers serialized the config file into a list of automapper struct func generateMappers() []automapper.AutoMapper { - return []automapper.AutoMapper{ - { - Package: "mapper", - Prefix: "users", - Name: "User Out", - Schema: automapper.Schema{ - Type: 
types.UserOut{}, - Prefix: "types", - }, - Model: automapper.Model{ - Type: ent.User{}, - Prefix: "ent", - }, - Imports: []string{}, - }, - } + return []automapper.AutoMapper{} } func generateTypeScript() { @@ -43,7 +27,6 @@ func generateTypeScript() { types.ApiSummary{}, // User Types - types.UserOut{}, types.UserCreate{}, types.UserIn{}, types.UserUpdate{}, diff --git a/backend/config.template.yml b/backend/config.template.yml index 0dc2626..d80911b 100644 --- a/backend/config.template.yml +++ b/backend/config.template.yml @@ -20,6 +20,7 @@ mailer: from: example@email.com seed: enabled: true + group: Default users: - name: Admin email: admin@admin.com diff --git a/backend/go.sum b/backend/go.sum index cc23f5f..2ad1220 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -53,14 +53,18 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/sergi/go-diff 
v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= +github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= diff --git a/backend/internal/config/conf_seed.go b/backend/internal/config/conf_seed.go index e076593..67a409a 100644 --- a/backend/internal/config/conf_seed.go +++ b/backend/internal/config/conf_seed.go @@ -10,4 +10,5 @@ type SeedUser struct { type Seed struct { Enabled bool `yaml:"enabled" conf:"default:false"` Users []SeedUser `yaml:"users"` + Group string `yaml:"group"` } diff --git a/backend/internal/mapper/users_automapper.go b/backend/internal/mapper/users_automapper.go deleted file mode 100644 index 78392f1..0000000 --- a/backend/internal/mapper/users_automapper.go +++ /dev/null @@ -1,27 +0,0 @@ -// Code generated by "/pkgs/automapper"; DO NOT EDIT. 
-package mapper - -import ( - "github.com/hay-kot/content/backend/ent" - "github.com/hay-kot/content/backend/internal/types" -) - -func UserOutFromModel(from ent.User) types.UserOut { - return types.UserOut{ - ID: from.ID, - Name: from.Name, - Email: from.Email, - Password: from.Password, - IsSuperuser: from.IsSuperuser, - } -} - -func UserOutToModel(from types.UserOut) ent.User { - return ent.User{ - ID: from.ID, - Name: from.Name, - Email: from.Email, - Password: from.Password, - IsSuperuser: from.IsSuperuser, - } -} diff --git a/backend/internal/repo/repo_group.go b/backend/internal/repo/repo_group.go new file mode 100644 index 0000000..85915f5 --- /dev/null +++ b/backend/internal/repo/repo_group.go @@ -0,0 +1,29 @@ +package repo + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent" +) + +type EntGroupRepository struct { + db *ent.Client +} + +func (r *EntGroupRepository) Create(ctx context.Context, name string) (*ent.Group, error) { + dbGroup, err := r.db.Group.Create().SetName(name).Save(ctx) + + if err != nil { + return dbGroup, err + } + return dbGroup, nil +} + +func (r *EntGroupRepository) GetOneId(ctx context.Context, id uuid.UUID) (*ent.Group, error) { + dbGroup, err := r.db.Group.Get(ctx, id) + if err != nil { + return dbGroup, err + } + return dbGroup, nil +} diff --git a/backend/internal/repo/token_ent.go b/backend/internal/repo/repo_tokens.go similarity index 90% rename from backend/internal/repo/token_ent.go rename to backend/internal/repo/repo_tokens.go index 7f3807d..a1e292c 100644 --- a/backend/internal/repo/token_ent.go +++ b/backend/internal/repo/repo_tokens.go @@ -6,7 +6,6 @@ import ( "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/ent/authtokens" - "github.com/hay-kot/content/backend/internal/mapper" "github.com/hay-kot/content/backend/internal/types" ) @@ -15,7 +14,7 @@ type EntTokenRepository struct { } // GetUserFromToken get's a user from a token -func (r 
*EntTokenRepository) GetUserFromToken(ctx context.Context, token []byte) (types.UserOut, error) { +func (r *EntTokenRepository) GetUserFromToken(ctx context.Context, token []byte) (*ent.User, error) { dbToken, err := r.db.AuthTokens.Query(). Where(authtokens.Token(token)). Where(authtokens.ExpiresAtGTE(time.Now())). @@ -23,10 +22,10 @@ func (r *EntTokenRepository) GetUserFromToken(ctx context.Context, token []byte) Only(ctx) if err != nil { - return types.UserOut{}, err + return nil, err } - return mapper.UserOutFromModel(*dbToken.Edges.User), nil + return dbToken.Edges.User, nil } // Creates a token for a user diff --git a/backend/internal/repo/token_ent_test.go b/backend/internal/repo/repo_tokens_test.go similarity index 100% rename from backend/internal/repo/token_ent_test.go rename to backend/internal/repo/repo_tokens_test.go diff --git a/backend/internal/repo/users_ent.go b/backend/internal/repo/repo_users.go similarity index 63% rename from backend/internal/repo/users_ent.go rename to backend/internal/repo/repo_users.go index 632c0fb..8a68231 100644 --- a/backend/internal/repo/users_ent.go +++ b/backend/internal/repo/repo_users.go @@ -13,66 +13,41 @@ type EntUserRepository struct { db *ent.Client } -func (e *EntUserRepository) toUserOut(usr *types.UserOut, entUsr *ent.User) { - usr.ID = entUsr.ID - usr.Password = entUsr.Password - usr.Name = entUsr.Name - usr.Email = entUsr.Email - usr.IsSuperuser = entUsr.IsSuperuser -} - -func (e *EntUserRepository) GetOneId(ctx context.Context, id uuid.UUID) (types.UserOut, error) { +func (e *EntUserRepository) GetOneId(ctx context.Context, id uuid.UUID) (*ent.User, error) { usr, err := e.db.User.Query().Where(user.ID(id)).Only(ctx) - usrOut := types.UserOut{} - if err != nil { - return usrOut, err + return usr, err } - e.toUserOut(&usrOut, usr) - - return usrOut, nil + return usr, nil } -func (e *EntUserRepository) GetOneEmail(ctx context.Context, email string) (types.UserOut, error) { +func (e *EntUserRepository) 
GetOneEmail(ctx context.Context, email string) (*ent.User, error) { usr, err := e.db.User.Query().Where(user.Email(email)).Only(ctx) - usrOut := types.UserOut{} - if err != nil { - return usrOut, err + return usr, err } - e.toUserOut(&usrOut, usr) - - return usrOut, nil + return usr, nil } -func (e *EntUserRepository) GetAll(ctx context.Context) ([]types.UserOut, error) { +func (e *EntUserRepository) GetAll(ctx context.Context) ([]*ent.User, error) { users, err := e.db.User.Query().All(ctx) if err != nil { return nil, err } - var usrs []types.UserOut - - for _, usr := range users { - usrOut := types.UserOut{} - e.toUserOut(&usrOut, usr) - usrs = append(usrs, usrOut) - } - - return usrs, nil + return users, nil } -func (e *EntUserRepository) Create(ctx context.Context, usr types.UserCreate) (types.UserOut, error) { +func (e *EntUserRepository) Create(ctx context.Context, usr types.UserCreate) (*ent.User, error) { err := usr.Validate() - usrOut := types.UserOut{} if err != nil { - return usrOut, err + return &ent.User{}, err } entUser, err := e.db.User. @@ -81,11 +56,10 @@ func (e *EntUserRepository) Create(ctx context.Context, usr types.UserCreate) (t SetEmail(usr.Email). SetPassword(usr.Password). SetIsSuperuser(usr.IsSuperuser). + SetGroupID(usr.GroupID). 
Save(ctx) - e.toUserOut(&usrOut, entUser) - - return usrOut, err + return entUser, err } func (e *EntUserRepository) Update(ctx context.Context, ID uuid.UUID, data types.UserUpdate) error { @@ -122,20 +96,12 @@ func (e *EntUserRepository) DeleteAll(ctx context.Context) error { return err } -func (e *EntUserRepository) GetSuperusers(ctx context.Context) ([]types.UserOut, error) { +func (e *EntUserRepository) GetSuperusers(ctx context.Context) ([]*ent.User, error) { users, err := e.db.User.Query().Where(user.IsSuperuser(true)).All(ctx) if err != nil { return nil, err } - var usrs []types.UserOut - - for _, usr := range users { - usrOut := types.UserOut{} - e.toUserOut(&usrOut, usr) - usrs = append(usrs, usrOut) - } - - return usrs, nil + return users, nil } diff --git a/backend/internal/repo/users_ent_test.go b/backend/internal/repo/repo_users_test.go similarity index 97% rename from backend/internal/repo/users_ent_test.go rename to backend/internal/repo/repo_users_test.go index 2bf9687..6740733 100644 --- a/backend/internal/repo/users_ent_test.go +++ b/backend/internal/repo/repo_users_test.go @@ -5,6 +5,7 @@ import ( "fmt" "testing" + "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/internal/types" "github.com/hay-kot/content/backend/pkgs/faker" "github.com/stretchr/testify/assert" @@ -66,7 +67,7 @@ func Test_EntUserRepo_GetAll(t *testing.T) { ctx := context.Background() - created := []types.UserOut{} + created := []*ent.User{} for _, usr := range toCreate { usrOut, _ := testRepos.Users.Create(ctx, usr) diff --git a/backend/internal/repo/repos_all.go b/backend/internal/repo/repos_all.go index 7703aa3..4a7b324 100644 --- a/backend/internal/repo/repos_all.go +++ b/backend/internal/repo/repos_all.go @@ -4,13 +4,15 @@ import "github.com/hay-kot/content/backend/ent" // AllRepos is a container for all the repository interfaces type AllRepos struct { - Users UserRepository - AuthTokens TokenRepository + Users *EntUserRepository + AuthTokens 
*EntTokenRepository + Groups *EntGroupRepository } func EntAllRepos(db *ent.Client) *AllRepos { return &AllRepos{ Users: &EntUserRepository{db}, AuthTokens: &EntTokenRepository{db}, + Groups: &EntGroupRepository{db}, } } diff --git a/backend/internal/repo/token_interface.go b/backend/internal/repo/token_interface.go deleted file mode 100644 index f610d9d..0000000 --- a/backend/internal/repo/token_interface.go +++ /dev/null @@ -1,20 +0,0 @@ -package repo - -import ( - "context" - - "github.com/hay-kot/content/backend/internal/types" -) - -type TokenRepository interface { - // GetUserFromToken get's a user from a token - GetUserFromToken(ctx context.Context, token []byte) (types.UserOut, error) - // Creates a token for a user - CreateToken(ctx context.Context, createToken types.UserAuthTokenCreate) (types.UserAuthToken, error) - // DeleteToken remove a single token from the database - equivalent to revoke or logout - DeleteToken(ctx context.Context, token []byte) error - // PurgeExpiredTokens removes all expired tokens from the database - PurgeExpiredTokens(ctx context.Context) (int, error) - // DeleteAll removes all tokens from the database - DeleteAll(ctx context.Context) (int, error) -} diff --git a/backend/internal/repo/users_interface.go b/backend/internal/repo/users_interface.go deleted file mode 100644 index 2e594ba..0000000 --- a/backend/internal/repo/users_interface.go +++ /dev/null @@ -1,27 +0,0 @@ -package repo - -import ( - "context" - - "github.com/google/uuid" - "github.com/hay-kot/content/backend/internal/types" -) - -type UserRepository interface { - // GetOneId returns a user by id - GetOneId(ctx context.Context, ID uuid.UUID) (types.UserOut, error) - // GetOneEmail returns a user by email - GetOneEmail(ctx context.Context, email string) (types.UserOut, error) - // GetAll returns all users - GetAll(ctx context.Context) ([]types.UserOut, error) - // Get Super Users - GetSuperusers(ctx context.Context) ([]types.UserOut, error) - // Create creates a new 
user - Create(ctx context.Context, user types.UserCreate) (types.UserOut, error) - // Update updates a user - Update(ctx context.Context, ID uuid.UUID, user types.UserUpdate) error - // Delete deletes a user - Delete(ctx context.Context, ID uuid.UUID) error - - DeleteAll(ctx context.Context) error -} diff --git a/backend/internal/services/contexts.go b/backend/internal/services/contexts.go index bc18fcc..6411f5c 100644 --- a/backend/internal/services/contexts.go +++ b/backend/internal/services/contexts.go @@ -3,7 +3,7 @@ package services import ( "context" - "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/ent" ) type contextKeys struct { @@ -17,16 +17,16 @@ var ( // SetUserCtx is a helper function that sets the ContextUser and ContextUserToken // values within the context of a web request (or any context). -func SetUserCtx(ctx context.Context, user *types.UserOut, token string) context.Context { +func SetUserCtx(ctx context.Context, user *ent.User, token string) context.Context { ctx = context.WithValue(ctx, ContextUser, user) ctx = context.WithValue(ctx, ContextUserToken, token) return ctx } // UseUserCtx is a helper function that returns the user from the context. 
-func UseUserCtx(ctx context.Context) *types.UserOut { +func UseUserCtx(ctx context.Context) *ent.User { if val := ctx.Value(ContextUser); val != nil { - return val.(*types.UserOut) + return val.(*ent.User) } return nil } diff --git a/backend/internal/services/contexts_test.go b/backend/internal/services/contexts_test.go index cf5a862..251baad 100644 --- a/backend/internal/services/contexts_test.go +++ b/backend/internal/services/contexts_test.go @@ -5,12 +5,12 @@ import ( "testing" "github.com/google/uuid" - "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/ent" "github.com/stretchr/testify/assert" ) func Test_SetAuthContext(t *testing.T) { - user := &types.UserOut{ + user := &ent.User{ ID: uuid.New(), } diff --git a/backend/internal/services/service_admin.go b/backend/internal/services/service_admin.go index 6fda029..8be4f68 100644 --- a/backend/internal/services/service_admin.go +++ b/backend/internal/services/service_admin.go @@ -4,6 +4,7 @@ import ( "context" "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/internal/repo" "github.com/hay-kot/content/backend/internal/types" ) @@ -12,27 +13,27 @@ type AdminService struct { repos *repo.AllRepos } -func (svc *AdminService) Create(ctx context.Context, usr types.UserCreate) (types.UserOut, error) { +func (svc *AdminService) Create(ctx context.Context, usr types.UserCreate) (*ent.User, error) { return svc.repos.Users.Create(ctx, usr) } -func (svc *AdminService) GetAll(ctx context.Context) ([]types.UserOut, error) { +func (svc *AdminService) GetAll(ctx context.Context) ([]*ent.User, error) { return svc.repos.Users.GetAll(ctx) } -func (svc *AdminService) GetByID(ctx context.Context, id uuid.UUID) (types.UserOut, error) { +func (svc *AdminService) GetByID(ctx context.Context, id uuid.UUID) (*ent.User, error) { return svc.repos.Users.GetOneId(ctx, id) } -func (svc *AdminService) GetByEmail(ctx context.Context, email string) 
(types.UserOut, error) { +func (svc *AdminService) GetByEmail(ctx context.Context, email string) (*ent.User, error) { return svc.repos.Users.GetOneEmail(ctx, email) } -func (svc *AdminService) UpdateProperties(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (types.UserOut, error) { +func (svc *AdminService) UpdateProperties(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (*ent.User, error) { err := svc.repos.Users.Update(ctx, ID, data) if err != nil { - return types.UserOut{}, err + return &ent.User{}, err } return svc.repos.Users.GetOneId(ctx, ID) diff --git a/backend/internal/services/service_user.go b/backend/internal/services/service_user.go index 0bb8d5f..de1cb27 100644 --- a/backend/internal/services/service_user.go +++ b/backend/internal/services/service_user.go @@ -6,6 +6,7 @@ import ( "time" "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent" "github.com/hay-kot/content/backend/internal/repo" "github.com/hay-kot/content/backend/internal/types" "github.com/hay-kot/content/backend/pkgs/hasher" @@ -22,17 +23,41 @@ type UserService struct { repos *repo.AllRepos } +func (svc *UserService) RegisterUser(ctx context.Context, data types.UserRegistration) (*ent.User, error) { + group, err := svc.repos.Groups.Create(ctx, data.GroupName) + if err != nil { + return &ent.User{}, err + } + + hashed, _ := hasher.HashPassword(data.User.Password) + + usrCreate := types.UserCreate{ + Name: data.User.Name, + Email: data.User.Email, + Password: hashed, + IsSuperuser: false, + GroupID: group.ID, + } + + usr, err := svc.repos.Users.Create(ctx, usrCreate) + if err != nil { + return &ent.User{}, err + } + + return usr, nil +} + // GetSelf returns the user that is currently logged in based of the token provided within -func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (types.UserOut, error) { +func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (*ent.User, error) { hash := 
hasher.HashToken(requestToken) return svc.repos.AuthTokens.GetUserFromToken(ctx, hash) } -func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (types.UserOut, error) { +func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (*ent.User, error) { err := svc.repos.Users.Update(ctx, ID, data) if err != nil { - return types.UserOut{}, err + return &ent.User{}, err } return svc.repos.Users.GetOneId(ctx, ID) diff --git a/backend/internal/types/users_types.go b/backend/internal/types/users_types.go index db1c404..81cc932 100644 --- a/backend/internal/types/users_types.go +++ b/backend/internal/types/users_types.go @@ -23,10 +23,11 @@ type UserIn struct { // in the database. It should to create users from an API unless the user has // rights to create SuperUsers. For regular user in data use the UserIn struct. type UserCreate struct { - Name string `json:"name"` - Email string `json:"email"` - Password string `json:"password"` - IsSuperuser bool `json:"isSuperuser"` + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"password"` + IsSuperuser bool `json:"isSuperuser"` + GroupID uuid.UUID `json:"groupID"` } func (u *UserCreate) Validate() error { @@ -39,20 +40,12 @@ func (u *UserCreate) Validate() error { return nil } -type UserOut struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Email string `json:"email"` - Password string `json:"-"` - IsSuperuser bool `json:"isSuperuser"` -} - -// IsNull is a proxy call for `usr.Id == uuid.Nil` -func (usr *UserOut) IsNull() bool { - return usr.ID == uuid.Nil -} - type UserUpdate struct { Name *string `json:"name"` Email *string `json:"email"` } + +type UserRegistration struct { + User UserIn `json:"user"` + GroupName string `json:"groupName"` +} diff --git a/backend/internal/types/users_types_test.go b/backend/internal/types/users_types_test.go index bc3b825..9f4a942 100644 --- 
a/backend/internal/types/users_types_test.go +++ b/backend/internal/types/users_types_test.go @@ -2,9 +2,6 @@ package types import ( "testing" - - "github.com/google/uuid" - "github.com/stretchr/testify/assert" ) func TestUserCreate_Validate(t *testing.T) { @@ -64,13 +61,3 @@ func TestUserCreate_Validate(t *testing.T) { }) } } - -func TestUserOut_IsNull(t *testing.T) { - nullUser := UserOut{} - - assert.True(t, nullUser.IsNull()) - - nullUser.ID = uuid.New() - - assert.False(t, nullUser.IsNull()) -} From 3829e2e3f5096845e01547848e1a4a1def334000 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 10:05:21 -0800 Subject: [PATCH 006/530] init frontend --- frontend/.editorconfig | 12 + frontend/.gitignore | 10 + frontend/.prettierrc | 9 + frontend/LICENSE | 21 + frontend/README.md | 137 + frontend/auto-imports.d.ts | 252 + frontend/components.d.ts | 18 + frontend/index.html | 34 + frontend/locales/en.json | 22 + frontend/locales/id.json | 22 + frontend/package.json | 69 + frontend/pnpm-lock.yaml | 5409 ++++++++++++++++++++ frontend/postcss.config.js | 6 + frontend/public/apple-touch-icon.png | Bin 0 -> 7911 bytes frontend/public/favicon-16x16.png | Bin 0 -> 647 bytes frontend/public/favicon-32x32.png | Bin 0 -> 1417 bytes frontend/public/favicon.ico | Bin 0 -> 15406 bytes frontend/public/pwa-192x192.png | Bin 0 -> 8867 bytes frontend/public/pwa-512x512.png | Bin 0 -> 22375 bytes frontend/public/robots.txt | 2 + frontend/public/site.webmanifest | 1 + frontend/src/App.vue | 3 + frontend/src/__test__/basic.spec.ts | 7 + frontend/src/assets/logo.png | Bin 0 -> 36025 bytes frontend/src/components/AppHeader.vue | 80 + frontend/src/components/Form/TextField.vue | 31 + frontend/src/env.d.ts | 8 + frontend/src/layouts/404.vue | 5 + frontend/src/layouts/default.vue | 18 + frontend/src/main.ts | 19 + frontend/src/modules/i18n.ts | 29 + frontend/src/modules/pinia.ts | 14 + frontend/src/modules/pwa.ts | 10 + 
frontend/src/pages/[...all].vue | 19 + frontend/src/pages/index.vue | 157 + frontend/src/router.ts | 17 + frontend/src/store/index.ts | 7 + frontend/src/styles/index.css | 3 + frontend/src/types/ViteSetupModule.ts | 3 + frontend/tailwind.config.js | 16 + frontend/tsconfig.json | 34 + frontend/typed-router.d.ts | 95 + frontend/vite.config.ts | 131 + 43 files changed, 6730 insertions(+) create mode 100644 frontend/.editorconfig create mode 100644 frontend/.gitignore create mode 100644 frontend/.prettierrc create mode 100644 frontend/LICENSE create mode 100644 frontend/README.md create mode 100644 frontend/auto-imports.d.ts create mode 100644 frontend/components.d.ts create mode 100644 frontend/index.html create mode 100644 frontend/locales/en.json create mode 100644 frontend/locales/id.json create mode 100644 frontend/package.json create mode 100644 frontend/pnpm-lock.yaml create mode 100644 frontend/postcss.config.js create mode 100644 frontend/public/apple-touch-icon.png create mode 100644 frontend/public/favicon-16x16.png create mode 100644 frontend/public/favicon-32x32.png create mode 100644 frontend/public/favicon.ico create mode 100644 frontend/public/pwa-192x192.png create mode 100644 frontend/public/pwa-512x512.png create mode 100644 frontend/public/robots.txt create mode 100644 frontend/public/site.webmanifest create mode 100644 frontend/src/App.vue create mode 100644 frontend/src/__test__/basic.spec.ts create mode 100644 frontend/src/assets/logo.png create mode 100644 frontend/src/components/AppHeader.vue create mode 100644 frontend/src/components/Form/TextField.vue create mode 100644 frontend/src/env.d.ts create mode 100644 frontend/src/layouts/404.vue create mode 100644 frontend/src/layouts/default.vue create mode 100644 frontend/src/main.ts create mode 100644 frontend/src/modules/i18n.ts create mode 100644 frontend/src/modules/pinia.ts create mode 100644 frontend/src/modules/pwa.ts create mode 100644 frontend/src/pages/[...all].vue create mode 100644 
frontend/src/pages/index.vue create mode 100644 frontend/src/router.ts create mode 100644 frontend/src/store/index.ts create mode 100644 frontend/src/styles/index.css create mode 100644 frontend/src/types/ViteSetupModule.ts create mode 100644 frontend/tailwind.config.js create mode 100644 frontend/tsconfig.json create mode 100644 frontend/typed-router.d.ts create mode 100644 frontend/vite.config.ts diff --git a/frontend/.editorconfig b/frontend/.editorconfig new file mode 100644 index 0000000..9554c73 --- /dev/null +++ b/frontend/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +end_of_line = lf +insert_final_newline = true + +# Matches multiple files with brace expansion notation +[*.{js,jsx,html,sass,vue,ts,tsx,json}] +charset = utf-8 +indent_style = tab +indent_size = 4 +trim_trailing_whitespace = true diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..a0782ae --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,10 @@ +node_modules +.DS_Store +dist +dist-ssr +*.local +.*-debug.log +*.log +.vercel +.vite-ssg-temp +.idea diff --git a/frontend/.prettierrc b/frontend/.prettierrc new file mode 100644 index 0000000..4b09075 --- /dev/null +++ b/frontend/.prettierrc @@ -0,0 +1,9 @@ +{ + "arrowParens": "avoid", + "semi": true, + "tabWidth": 4, + "useTabs": true, + "vueIndentScriptAndStyle": true, + "singleQuote": true, + "trailingComma": "es5" +} diff --git a/frontend/LICENSE b/frontend/LICENSE new file mode 100644 index 0000000..a441282 --- /dev/null +++ b/frontend/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Christopher Reeve + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to 
do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..c9c506a --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,137 @@ +

+ Vitailse - Opinionated Vite Starter Template with TailwindCSS +

+ +Opinionated Vite starter template with [TailwindCSS](https://tailwindcss.com/) + +Inspired by [Vitesse](https://github.com/antfu/vitesse) ❤ + +## Features + +- ⚡️ [Vue 3](https://github.com/vuejs/vue-next), [Vite 2](https://github.com/vitejs/vite), [pnpm](https://pnpm.js.org/), [ESBuild](https://github.com/evanw/esbuild) - born with fastness + +- 🗂 [File based routing](./src/pages) + +- 📦 [Components auto importing](./src/components) + +- 🍍 [State Management via Pinia](https://pinia.esm.dev/) + +- 📑 [Layout system](./src/layouts) + +- 📲 [PWA](https://github.com/antfu/vite-plugin-pwa) + +- 🌍 [I18n ready](./locales) + +- 🎨 [Tailwind CSS](https://tailwindcss.com/) - Rapidly build modern websites without ever leaving your HTML. + +- 😃 [Use icons from any icon sets, with no compromise](https://github.com/antfu/unplugin-icons) + +- 🔥 Use the [new ` + + diff --git a/frontend/locales/en.json b/frontend/locales/en.json new file mode 100644 index 0000000..adad875 --- /dev/null +++ b/frontend/locales/en.json @@ -0,0 +1,22 @@ +{ + "pages": { + "home": "Home", + "other": { + "menu": "Other Page", + "desc": "An example of other pages" + }, + "not-found": "Page not found" + }, + "app": { + "offline": "App ready to work offline", + "new-content": "New content available, click on reload button to update." 
+ }, + "intro": { + "desc": "Welcome to Vitailse, Vite starter template with {tailwindurl}", + "github": "Please give stars and report any issues on our {githuburl}" + }, + "button": { + "reload": "Reload", + "close": "Close" + } +} diff --git a/frontend/locales/id.json b/frontend/locales/id.json new file mode 100644 index 0000000..53e7c41 --- /dev/null +++ b/frontend/locales/id.json @@ -0,0 +1,22 @@ +{ + "pages": { + "home": "Beranda", + "other": { + "menu": "Halaman lain", + "desc": "Contoh untuk halaman lain" + }, + "not-found": "Laman tidak ditemukan" + }, + "app": { + "offline": "Aplikasi siap digunakan tanpa jaringan internet", + "new-content": "Konten baru ditemukan, Tekan tombol perbarui untuk memperbarui laman." + }, + "intro": { + "desc": "Selamat datang di Vitailse, Template awal vite dengan ", + "github": "Mohon berikan bintang dan laporkan masalah pada " + }, + "button": { + "reload": "Perbarui", + "close": "Tutup" + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..45dd54b --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,69 @@ +{ + "name": "@zynth/vitailse", + "description": "Vite starter template with TailwindCSS", + "version": "0.1.0", + "main": "src/main.ts", + "repository": { + "type": "git", + "url": "git+https://github.com/zynth17/vitailse.git" + }, + "keywords": [ + "vitailse", + "tailwindcss", + "vite", + "vitesse" + ], + "author": "Christopher Reeeve", + "license": "MIT", + "bugs": { + "url": "https://github.com/zynth17/vitailse/issues" + }, + "homepage": "https://github.com/zynth17/vitailse#readme", + "scripts": { + "dev": "vite", + "build": "vite-ssg build", + "serve": "vite preview", + "https-preview": "serve dist" + }, + "dependencies": { + "@tailwindcss/aspect-ratio": "^0.4.0", + "@tailwindcss/forms": "^0.5.2", + "@tailwindcss/typography": "^0.5.4", + "@types/node": "^18.0.4", + "@vueuse/components": "^8.9.3", + "@vueuse/core": "^8.9.3", + "@vueuse/head": "^0.7.6", + "autoprefixer": 
"^10.4.7", + "daisyui": "^2.24.0", + "pinia": "^2.0.16", + "postcss": "^8.4.14", + "tailwindcss": "^3.1.6", + "vue": "^3.2.37", + "vue-i18n": "^9.1.10", + "vue-router": "^4.1.2", + "workbox": "^0.0.0", + "workbox-window": "^6.5.3" + }, + "devDependencies": { + "@iconify/json": "^2.1.78", + "@intlify/vite-plugin-vue-i18n": "^5.0.0", + "@vitejs/plugin-vue": "^3.0.0", + "@vue/compiler-sfc": "^3.2.37", + "@vue/server-renderer": "^3.2.37", + "critters": "^0.0.16", + "https-localhost": "^4.7.1", + "typescript": "^4.7.4", + "unplugin-auto-import": "^0.9.3", + "unplugin-icons": "^0.14.7", + "unplugin-vue-components": "0.21.1", + "unplugin-vue-router": "^0.0.21", + "vite": "^3.0.0", + "vite-plugin-pwa": "^0.12.3", + "vite-plugin-vue-layouts": "^0.7.0", + "vite-plugin-vue-type-imports": "^0.2.0", + "vite-ssg": "^0.20.2", + "vite-ssg-sitemap": "^0.3.2", + "vitest": "^0.18.0", + "vue-tsc": "^0.38.5" + } +} diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml new file mode 100644 index 0000000..956b60f --- /dev/null +++ b/frontend/pnpm-lock.yaml @@ -0,0 +1,5409 @@ +lockfileVersion: 5.4 + +specifiers: + '@iconify/json': ^2.1.78 + '@intlify/vite-plugin-vue-i18n': ^5.0.0 + '@tailwindcss/aspect-ratio': ^0.4.0 + '@tailwindcss/forms': ^0.5.2 + '@tailwindcss/typography': ^0.5.4 + '@types/node': ^18.0.4 + '@vitejs/plugin-vue': ^3.0.0 + '@vue/compiler-sfc': ^3.2.37 + '@vue/server-renderer': ^3.2.37 + '@vueuse/components': ^8.9.3 + '@vueuse/core': ^8.9.3 + '@vueuse/head': ^0.7.6 + autoprefixer: ^10.4.7 + critters: ^0.0.16 + daisyui: ^2.24.0 + https-localhost: ^4.7.1 + pinia: ^2.0.16 + postcss: ^8.4.14 + tailwindcss: ^3.1.6 + typescript: ^4.7.4 + unplugin-auto-import: ^0.9.3 + unplugin-icons: ^0.14.7 + unplugin-vue-components: 0.21.1 + unplugin-vue-router: ^0.0.21 + vite: ^3.0.0 + vite-plugin-pwa: ^0.12.3 + vite-plugin-vue-layouts: ^0.7.0 + vite-plugin-vue-type-imports: ^0.2.0 + vite-ssg: ^0.20.2 + vite-ssg-sitemap: ^0.3.2 + vitest: ^0.18.0 + vue: ^3.2.37 + vue-i18n: ^9.1.10 + 
vue-router: ^4.1.2 + vue-tsc: ^0.38.5 + workbox: ^0.0.0 + workbox-window: ^6.5.3 + +dependencies: + '@tailwindcss/aspect-ratio': 0.4.0_tailwindcss@3.1.6 + '@tailwindcss/forms': 0.5.2_tailwindcss@3.1.6 + '@tailwindcss/typography': 0.5.4_tailwindcss@3.1.6 + '@types/node': 18.0.4 + '@vueuse/components': 8.9.3_vue@3.2.37 + '@vueuse/core': 8.9.3_vue@3.2.37 + '@vueuse/head': 0.7.6_vue@3.2.37 + autoprefixer: 10.4.7_postcss@8.4.14 + daisyui: 2.24.0_ugi4xkrfysqkt4c4y6hkyfj344 + pinia: 2.0.16_j6bzmzd4ujpabbp5objtwxyjp4 + postcss: 8.4.14 + tailwindcss: 3.1.6_postcss@8.4.14 + vue: 3.2.37 + vue-i18n: 9.1.10_vue@3.2.37 + vue-router: 4.1.2_vue@3.2.37 + workbox: 0.0.0 + workbox-window: 6.5.3 + +devDependencies: + '@iconify/json': 2.1.78 + '@intlify/vite-plugin-vue-i18n': 5.0.0_vite@3.0.0+vue-i18n@9.1.10 + '@vitejs/plugin-vue': 3.0.0_vite@3.0.0+vue@3.2.37 + '@vue/compiler-sfc': 3.2.37 + '@vue/server-renderer': 3.2.37_vue@3.2.37 + critters: 0.0.16 + https-localhost: 4.7.1 + typescript: 4.7.4 + unplugin-auto-import: 0.9.3_gvio5bgcjg37ethveel5rvqgym + unplugin-icons: 0.14.7_5vhdwjrvx3yqj3k5avrlxtwoii + unplugin-vue-components: 0.21.1_vite@3.0.0+vue@3.2.37 + unplugin-vue-router: 0.0.21_6ef32vilt6ae74xg3uetebg7ja + vite: 3.0.0 + vite-plugin-pwa: 0.12.3_2slanrkxy3rtt36tq3uv5vcihy + vite-plugin-vue-layouts: 0.7.0_5vcanmustn365rg452uyprxay4 + vite-plugin-vue-type-imports: 0.2.0_2yymnzrok6eda47acnj2yjm3ae + vite-ssg: 0.20.2_zodgg63wx4ia7g264h7fi3aaae + vite-ssg-sitemap: 0.3.2 + vitest: 0.18.0 + vue-tsc: 0.38.5_typescript@4.7.4 + +packages: + + /@ampproject/remapping/2.2.0: + resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/gen-mapping': 0.1.1 + '@jridgewell/trace-mapping': 0.3.14 + dev: true + + /@antfu/install-pkg/0.1.0: + resolution: {integrity: sha512-VaIJd3d1o7irZfK1U0nvBsHMyjkuyMP3HKYVV53z8DKyulkHKmjhhtccXO51WSPeeSHIeoJEoNOKavYpS7jkZw==} + dependencies: + 
execa: 5.1.1 + find-up: 5.0.0 + dev: true + + /@antfu/utils/0.5.2: + resolution: {integrity: sha512-CQkeV+oJxUazwjlHD0/3ZD08QWKuGQkhnrKo3e6ly5pd48VUpXbb77q0xMU4+vc2CkJnDS02Eq/M9ugyX20XZA==} + dev: true + + /@apideck/better-ajv-errors/0.3.6_ajv@8.11.0: + resolution: {integrity: sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA==} + engines: {node: '>=10'} + peerDependencies: + ajv: '>=8' + dependencies: + ajv: 8.11.0 + json-schema: 0.4.0 + jsonpointer: 5.0.1 + leven: 3.1.0 + dev: true + + /@babel/code-frame/7.18.6: + resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/highlight': 7.18.6 + dev: true + + /@babel/compat-data/7.18.8: + resolution: {integrity: sha512-HSmX4WZPPK3FUxYp7g2T6EyO8j96HlZJlxmKPSh6KAcqwyDrfx7hKjXpAW/0FhFfTJsR0Yt4lAjLI2coMptIHQ==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/core/7.18.6: + resolution: {integrity: sha512-cQbWBpxcbbs/IUredIPkHiAGULLV8iwgNRMFzvbhEXISp4f3rUUXE5+TIw6KwUWUR3DwyI6gmBRnmAtYaWehwQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.2.0 + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.18.7 + '@babel/helper-compilation-targets': 7.18.6_@babel+core@7.18.6 + '@babel/helper-module-transforms': 7.18.8 + '@babel/helpers': 7.18.6 + '@babel/parser': 7.18.8 + '@babel/template': 7.18.6 + '@babel/traverse': 7.18.8 + '@babel/types': 7.18.8 + convert-source-map: 1.8.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.1 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/generator/7.18.7: + resolution: {integrity: sha512-shck+7VLlY72a2w9c3zYWuE1pwOKEiQHV7GTUbSnhyl5eu3i04t30tBY82ZRWrDfo3gkakCFtevExnxbkf2a3A==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + '@jridgewell/gen-mapping': 0.3.2 + jsesc: 2.5.2 + dev: true + + /@babel/helper-annotate-as-pure/7.18.6: + resolution: 
{integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-builder-binary-assignment-operator-visitor/7.18.6: + resolution: {integrity: sha512-KT10c1oWEpmrIRYnthbzHgoOf6B+Xd6a5yhdbNtdhtG7aO1or5HViuf1TQR36xY/QprXA5nvxO6nAjhJ4y38jw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-explode-assignable-expression': 7.18.6 + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-compilation-targets/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-vFjbfhNCzqdeAtZflUFrG5YIFqGTqsctrtkZ1D/NB0mDW9TwW3GmmUepYY4G9wCET5rY5ugz4OGTcLd614IzQg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/compat-data': 7.18.8 + '@babel/core': 7.18.6 + '@babel/helper-validator-option': 7.18.6 + browserslist: 4.21.2 + semver: 6.3.0 + dev: true + + /@babel/helper-create-class-features-plugin/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-YfDzdnoxHGV8CzqHGyCbFvXg5QESPFkXlHtvdCkesLjjVMT2Adxe4FGUR5ChIb3DxSaXO12iIOCWoXdsUVwnqw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.6 + '@babel/helper-function-name': 7.18.6 + '@babel/helper-member-expression-to-functions': 7.18.6 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-replace-supers': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-create-regexp-features-plugin/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-7LcpH1wnQLGrI+4v+nPp+zUvIkF9x0ddv1Hkdue10tg3gmRnLy97DXh4STiOf1qeIInyD69Qv5kKSZzKD8B/7A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-annotate-as-pure': 
7.18.6 + regexpu-core: 5.1.0 + dev: true + + /@babel/helper-define-polyfill-provider/0.3.1_@babel+core@7.18.6: + resolution: {integrity: sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==} + peerDependencies: + '@babel/core': ^7.4.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-compilation-targets': 7.18.6_@babel+core@7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/traverse': 7.18.8 + debug: 4.3.4 + lodash.debounce: 4.0.8 + resolve: 1.22.1 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-environment-visitor/7.18.6: + resolution: {integrity: sha512-8n6gSfn2baOY+qlp+VSzsosjCVGFqWKmDF0cCWOybh52Dw3SEyoWR1KrhMJASjLwIEkkAufZ0xvr+SxLHSpy2Q==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-explode-assignable-expression/7.18.6: + resolution: {integrity: sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-function-name/7.18.6: + resolution: {integrity: sha512-0mWMxV1aC97dhjCah5U5Ua7668r5ZmSC2DLfH2EZnf9c3/dHZKiFa5pRLMH5tjSl471tY6496ZWk/kjNONBxhw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.18.6 + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-hoist-variables/7.18.6: + resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-member-expression-to-functions/7.18.6: + resolution: {integrity: sha512-CeHxqwwipekotzPDUuJOfIMtcIHBuc7WAzLmTYWctVigqS5RktNMQ5bEwQSuGewzYnCtTWa3BARXeiLxDTv+Ng==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-module-imports/7.18.6: + resolution: {integrity: 
sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-module-transforms/7.18.8: + resolution: {integrity: sha512-che3jvZwIcZxrwh63VfnFTUzcAM9v/lznYkkRxIBGMPt1SudOKHAEec0SIRCfiuIzTcF7VGj/CaTT6gY4eWxvA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-simple-access': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/helper-validator-identifier': 7.18.6 + '@babel/template': 7.18.6 + '@babel/traverse': 7.18.8 + '@babel/types': 7.18.8 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-optimise-call-expression/7.18.6: + resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-plugin-utils/7.18.6: + resolution: {integrity: sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-remap-async-to-generator/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-z5wbmV55TveUPZlCLZvxWHtrjuJd+8inFhk7DG0WW87/oJuGDcjDiu7HIvGcpf5464L6xKCg3vNkmlVVz9hwyQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.6 + '@babel/helper-wrap-function': 7.18.6 + '@babel/types': 7.18.8 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-replace-supers/7.18.6: + resolution: {integrity: sha512-fTf7zoXnUGl9gF25fXCWE26t7Tvtyn6H4hkLSYhATwJvw2uYxd3aoXplMSe0g9XbwK7bmxNes7+FGO0rB/xC0g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.6 + 
'@babel/helper-member-expression-to-functions': 7.18.6 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/traverse': 7.18.8 + '@babel/types': 7.18.8 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-simple-access/7.18.6: + resolution: {integrity: sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-skip-transparent-expression-wrappers/7.18.6: + resolution: {integrity: sha512-4KoLhwGS9vGethZpAhYnMejWkX64wsnHPDwvOsKWU6Fg4+AlK2Jz3TyjQLMEPvz+1zemi/WBdkYxCD0bAfIkiw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-split-export-declaration/7.18.6: + resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.18.8 + dev: true + + /@babel/helper-validator-identifier/7.18.6: + resolution: {integrity: sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==} + engines: {node: '>=6.9.0'} + + /@babel/helper-validator-option/7.18.6: + resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-wrap-function/7.18.6: + resolution: {integrity: sha512-I5/LZfozwMNbwr/b1vhhuYD+J/mU+gfGAj5td7l5Rv9WYmH6i3Om69WGKNmlIpsVW/mF6O5bvTKbvDQZVgjqOw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-function-name': 7.18.6 + '@babel/template': 7.18.6 + '@babel/traverse': 7.18.8 + '@babel/types': 7.18.8 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helpers/7.18.6: + resolution: {integrity: sha512-vzSiiqbQOghPngUYt/zWGvK3LAsPhz55vc9XNN0xAl2gV4ieShI2OQli5duxWHD+72PZPTKAcfcZDE1Cwc5zsQ==} + engines: {node: '>=6.9.0'} + dependencies: + 
'@babel/template': 7.18.6 + '@babel/traverse': 7.18.8 + '@babel/types': 7.18.8 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/highlight/7.18.6: + resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.18.6 + chalk: 2.4.2 + js-tokens: 4.0.0 + dev: true + + /@babel/parser/7.18.8: + resolution: {integrity: sha512-RSKRfYX20dyH+elbJK2uqAkVyucL+xXzhqlMD5/ZXx+dAAwpyB7HsvnHe/ZUGOF+xLr5Wx9/JoXVTj6BQE2/oA==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.18.8 + + /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-Udgu8ZRgrBrttVz6A0EVL0SJ1z+RLbIeqsu632SA1hf0awEppD6TvdznoH+orIF8wtFFAV/Enmw9Y+9oV8TQcw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.13.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.18.6 + '@babel/plugin-proposal-optional-chaining': 7.18.6_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-async-generator-functions/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-WAz4R9bvozx4qwf74M+sfqPMKfSqwM0phxPTR6iJIi8robgzXwkEgmeJG1gEKhm6sDqT/U9aV3lfcqybIpev8w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-environment-visitor': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + 
'@babel/helper-remap-async-to-generator': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-class-properties/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-class-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-class-static-block/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.12.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-class-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-class-static-block': 7.14.5_@babel+core@7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-dynamic-import/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-export-namespace-from/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-zr/QcUlUo7GPo6+X1wC98NJADqmy5QTFWWhqeQWiki4XHafJtLl/YMGkmRB2szDD2IYJCCdBTd4ElwhId9T7Xw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + 
'@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-export-namespace-from': 7.8.3_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-json-strings/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-logical-assignment-operators/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-zMo66azZth/0tVd7gmkxOkOjs2rpHyhpcFo565PUP37hSp6hSd9uUKIfTDFMz58BwqgQKhJ9YxtM5XddjXVn+Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-nullish-coalescing-operator/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-numeric-separator/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-object-rest-spread/7.18.6_@babel+core@7.18.6: + resolution: {integrity: 
sha512-9yuM6wr4rIsKa1wlUAbZEazkCrgw2sMPEXCr4Rnwetu7cEW1NydkCWytLuYletbf8vFxdJxFhwEZqMpOx2eZyw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.18.8 + '@babel/core': 7.18.6 + '@babel/helper-compilation-targets': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-transform-parameters': 7.18.8_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-optional-catch-binding/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-optional-chaining/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-PatI6elL5eMzoypFAiYDpYQyMtXTn+iMhuxxQt5mAXD4fEmKorpSI3PHd+i3JXBJN3xyA6MvJv7at23HffFHwA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.18.6 + '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.18.6 + dev: true + + /@babel/plugin-proposal-private-methods/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-class-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-private-property-in-object/7.18.6_@babel+core@7.18.6: + 
resolution: {integrity: sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-create-class-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-private-property-in-object': 7.14.5_@babel+core@7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-unicode-property-regex/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==} + engines: {node: '>=4'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-regexp-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-async-generators/7.8.4_@babel+core@7.18.6: + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-class-properties/7.12.13_@babel+core@7.18.6: + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-class-static-block/7.14.5_@babel+core@7.18.6: + resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + 
/@babel/plugin-syntax-dynamic-import/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-export-namespace-from/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-import-assertions/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-json-strings/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-logical-assignment-operators/7.10.4_@babel+core@7.18.6: + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-nullish-coalescing-operator/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + 
'@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-numeric-separator/7.10.4_@babel+core@7.18.6: + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-object-rest-spread/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-optional-catch-binding/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-optional-chaining/7.8.3_@babel+core@7.18.6: + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-private-property-in-object/7.14.5_@babel+core@7.18.6: + resolution: {integrity: sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-syntax-top-level-await/7.14.5_@babel+core@7.18.6: + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + 
'@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-arrow-functions/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-async-to-generator/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-remap-async-to-generator': 7.18.6_@babel+core@7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-block-scoped-functions/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-block-scoping/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-pRqwb91C42vs1ahSAWJkxOxU1RHWDn16XAa6ggQ72wjLlWyYeAcLvTtE0aM8ph3KNydy9CQF2nLYcjq1WysgxQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-classes/7.18.8_@babel+core@7.18.6: + resolution: {integrity: sha512-RySDoXdF6hgHSHuAW4aLGyVQdmvEX/iJtjVre52k0pxRq4hzqze+rAVP++NmNv596brBpYmaiKgTZby7ziBnVg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + 
dependencies: + '@babel/core': 7.18.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.6 + '@babel/helper-function-name': 7.18.6 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-replace-supers': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-computed-properties/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-9repI4BhNrR0KenoR9vm3/cIc1tSBIo+u1WVjKCAynahj25O8zfbiE6JtAtHPGQSs4yZ+bA8mRasRP+qc+2R5A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-destructuring/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-tgy3u6lRp17ilY8r1kP4i2+HDUwxlVqq3RTc943eAWSzGgpU1qhiKpqZ5CMyHReIYPHdo3Kg8v8edKtDqSVEyQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-dotall-regex/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-regexp-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-duplicate-keys/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-NJU26U/208+sxYszf82nmGYqVF9QN8py2HFTblPT9hbawi8+1C5a9JubODLTGFuT0qlkqVinmkwOD13s0sZktg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + 
/@babel/plugin-transform-exponentiation-operator/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-builder-binary-assignment-operator-visitor': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-for-of/7.18.8_@babel+core@7.18.6: + resolution: {integrity: sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-function-name/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-kJha/Gbs5RjzIu0CxZwf5e3aTTSlhZnHMT8zPWnJMjNpLOUgqevg+PN5oMH68nMCXnfiMo4Bhgxqj59KHTlAnA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-compilation-targets': 7.18.6_@babel+core@7.18.6 + '@babel/helper-function-name': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-literals/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-x3HEw0cJZVDoENXOp20HlypIHfl0zMIhMVZEBVTfmqbObIpsMxMbmU5nOEO8R7LYT+z5RORKPlTI5Hj4OsO9/Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-member-expression-literals/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + 
/@babel/plugin-transform-modules-amd/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-module-transforms': 7.18.8 + '@babel/helper-plugin-utils': 7.18.6 + babel-plugin-dynamic-import-node: 2.3.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-modules-commonjs/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-module-transforms': 7.18.8 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-simple-access': 7.18.6 + babel-plugin-dynamic-import-node: 2.3.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-modules-systemjs/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-UbPYpXxLjTw6w6yXX2BYNxF3p6QY225wcTkfQCy3OMnSlS/C3xGtwUjEzGkldb/sy6PWLiCQ3NbYfjWUTI3t4g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-module-transforms': 7.18.8 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-validator-identifier': 7.18.6 + babel-plugin-dynamic-import-node: 2.3.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-modules-umd/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-module-transforms': 7.18.8 + '@babel/helper-plugin-utils': 7.18.6 
+ transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-named-capturing-groups-regex/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-UmEOGF8XgaIqD74bC8g7iV3RYj8lMf0Bw7NJzvnS9qQhM4mg+1WHKotUIdjxgD2RGrgFLZZPCFPFj3P/kVDYhg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-regexp-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-new-target/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-object-super/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-replace-supers': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-parameters/7.18.8_@babel+core@7.18.6: + resolution: {integrity: sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-property-literals/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + 
'@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-regenerator/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + regenerator-transform: 0.15.0 + dev: true + + /@babel/plugin-transform-reserved-words/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-shorthand-properties/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-spread/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-ayT53rT/ENF8WWexIRg9AiV9h0aIteyWn5ptfZTZQrjk/+f3WdrJGCY4c9wcgl2+MKkKPhzbYp97FTsquZpDCw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.18.6 + dev: true + + /@babel/plugin-transform-sticky-regex/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + 
/@babel/plugin-transform-template-literals/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-UuqlRrQmT2SWRvahW46cGSany0uTlcj8NYOS5sRGYi8FxPYPoLd5DDmMd32ZXEj2Jq+06uGVQKHxa/hJx2EzKw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-typeof-symbol/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-7m71iS/QhsPk85xSjFPovHPcH3H9qeyzsujhTc+vcdnsXavoWYJ74zx0lP5RhpC5+iDnVLO+PPMHzC11qels1g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-unicode-escapes/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-XNRwQUXYMP7VLuy54cr/KS/WeL3AZeORhrmeZ7iewgu+X2eBqmpaLI/hzqr9ZxCeUoq0ASK4GUzSM0BDhZkLFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/plugin-transform-unicode-regex/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-create-regexp-features-plugin': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + dev: true + + /@babel/preset-env/7.18.6_@babel+core@7.18.6: + resolution: {integrity: sha512-WrthhuIIYKrEFAwttYzgRNQ5hULGmwTj+D6l7Zdfsv5M7IWV/OZbUfbeL++Qrzx1nVJwWROIFhCHRYQV4xbPNw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.18.8 + '@babel/core': 7.18.6 + '@babel/helper-compilation-targets': 7.18.6_@babel+core@7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-validator-option': 7.18.6 + 
'@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-async-generator-functions': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-class-properties': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-class-static-block': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-dynamic-import': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-export-namespace-from': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-json-strings': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-logical-assignment-operators': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-numeric-separator': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-object-rest-spread': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-optional-catch-binding': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-optional-chaining': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-private-methods': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-private-property-in-object': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-proposal-unicode-property-regex': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.18.6 + '@babel/plugin-syntax-class-properties': 7.12.13_@babel+core@7.18.6 + '@babel/plugin-syntax-class-static-block': 7.14.5_@babel+core@7.18.6 + '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-syntax-export-namespace-from': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-syntax-import-assertions': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.18.6 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.18.6 + 
'@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.18.6 + '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.18.6 + '@babel/plugin-syntax-private-property-in-object': 7.14.5_@babel+core@7.18.6 + '@babel/plugin-syntax-top-level-await': 7.14.5_@babel+core@7.18.6 + '@babel/plugin-transform-arrow-functions': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-async-to-generator': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-block-scoped-functions': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-block-scoping': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-classes': 7.18.8_@babel+core@7.18.6 + '@babel/plugin-transform-computed-properties': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-destructuring': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-dotall-regex': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-duplicate-keys': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-exponentiation-operator': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-for-of': 7.18.8_@babel+core@7.18.6 + '@babel/plugin-transform-function-name': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-literals': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-member-expression-literals': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-modules-amd': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-modules-commonjs': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-modules-systemjs': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-modules-umd': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-named-capturing-groups-regex': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-new-target': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-object-super': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-parameters': 7.18.8_@babel+core@7.18.6 + 
'@babel/plugin-transform-property-literals': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-regenerator': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-reserved-words': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-shorthand-properties': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-spread': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-sticky-regex': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-template-literals': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-typeof-symbol': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-unicode-escapes': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-unicode-regex': 7.18.6_@babel+core@7.18.6 + '@babel/preset-modules': 0.1.5_@babel+core@7.18.6 + '@babel/types': 7.18.8 + babel-plugin-polyfill-corejs2: 0.3.1_@babel+core@7.18.6 + babel-plugin-polyfill-corejs3: 0.5.2_@babel+core@7.18.6 + babel-plugin-polyfill-regenerator: 0.3.1_@babel+core@7.18.6 + core-js-compat: 3.23.4 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/preset-modules/0.1.5_@babel+core@7.18.6: + resolution: {integrity: sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-proposal-unicode-property-regex': 7.18.6_@babel+core@7.18.6 + '@babel/plugin-transform-dotall-regex': 7.18.6_@babel+core@7.18.6 + '@babel/types': 7.18.8 + esutils: 2.0.3 + dev: true + + /@babel/runtime/7.18.6: + resolution: {integrity: sha512-t9wi7/AW6XtKahAe20Yw0/mMljKq0B1r2fPdvaAdV/KPDZewFXdaaa6K7lxmZBZ8FBNpCiAT6iHPmd6QO9bKfQ==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.9 + dev: true + + /@babel/template/7.18.6: + resolution: {integrity: sha512-JoDWzPe+wgBsTTgdnIma3iHNFC7YVJoPssVBDjiHfNlyt4YcunDtcDOUmfVDfCK5MfdsaIoX9PkijPhjH3nYUw==} + engines: {node: '>=6.9.0'} + dependencies: + 
'@babel/code-frame': 7.18.6 + '@babel/parser': 7.18.8 + '@babel/types': 7.18.8 + dev: true + + /@babel/traverse/7.18.8: + resolution: {integrity: sha512-UNg/AcSySJYR/+mIcJQDCv00T+AqRO7j/ZEJLzpaYtgM48rMg5MnkJgyNqkzo88+p4tfRvZJCEiwwfG6h4jkRg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.18.7 + '@babel/helper-environment-visitor': 7.18.6 + '@babel/helper-function-name': 7.18.6 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.18.8 + '@babel/types': 7.18.8 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/types/7.18.8: + resolution: {integrity: sha512-qwpdsmraq0aJ3osLJRApsc2ouSJCdnMeZwB0DhbtHAtRpZNZCdlbRnHIgcRKzdE1g0iOGg644fzjOBcdOz9cPw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.18.6 + to-fast-properties: 2.0.0 + + /@iconify/json/2.1.78: + resolution: {integrity: sha512-WxWKgVEsnnj6pQVO9zOCYq3kIDIIbAv+RDxfU66DJw16BjKjXxcUyCDEARFRbqodSFd1p7o6Uw4zJ5iggItaQg==} + dependencies: + '@iconify/types': 1.1.0 + pathe: 0.3.2 + dev: true + + /@iconify/types/1.1.0: + resolution: {integrity: sha512-Jh0llaK2LRXQoYsorIH8maClebsnzTcve+7U3rQUSnC11X4jtPnFuyatqFLvMxZ8MLG8dB4zfHsbPfuvxluONw==} + dev: true + + /@iconify/utils/1.0.33: + resolution: {integrity: sha512-vGeAqo7aGPxOQmGdVoXFUOuyN+0V7Lcrx2EvaiRjxUD1x6Om0Tvq2bdm7E24l2Pz++4S0mWMCVFXe/17EtKImQ==} + dependencies: + '@antfu/install-pkg': 0.1.0 + '@antfu/utils': 0.5.2 + '@iconify/types': 1.1.0 + debug: 4.3.4 + kolorist: 1.5.1 + local-pkg: 0.4.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@intlify/bundle-utils/3.1.0_vue-i18n@9.1.10: + resolution: {integrity: sha512-ghlJ0kR2cCQ8D+poKknC0Xx0ncOt3J3os7CcIAqqIWVF7k6AtGoCDnIru+YzlZcvFRNmP9wEZ7jKliojCdAWNg==} + engines: {node: '>= 12'} + peerDependencies: + petite-vue-i18n: '*' + vue-i18n: '*' + peerDependenciesMeta: + petite-vue-i18n: + optional: 
true + vue-i18n: + optional: true + dependencies: + '@intlify/message-compiler': 9.3.0-beta.1 + '@intlify/shared': 9.3.0-beta.1 + jsonc-eslint-parser: 1.4.1 + source-map: 0.6.1 + vue-i18n: 9.1.10_vue@3.2.37 + yaml-eslint-parser: 0.3.2 + dev: true + + /@intlify/core-base/9.1.10: + resolution: {integrity: sha512-So9CNUavB/IsZ+zBmk2Cv6McQp6vc2wbGi1S0XQmJ8Vz+UFcNn9MFXAe9gY67PreIHrbLsLxDD0cwo1qsxM1Nw==} + engines: {node: '>= 10'} + dependencies: + '@intlify/devtools-if': 9.1.10 + '@intlify/message-compiler': 9.1.10 + '@intlify/message-resolver': 9.1.10 + '@intlify/runtime': 9.1.10 + '@intlify/shared': 9.1.10 + '@intlify/vue-devtools': 9.1.10 + + /@intlify/devtools-if/9.1.10: + resolution: {integrity: sha512-SHaKoYu6sog3+Q8js1y3oXLywuogbH1sKuc7NSYkN3GElvXSBaMoCzW+we0ZSFqj/6c7vTNLg9nQ6rxhKqYwnQ==} + engines: {node: '>= 10'} + dependencies: + '@intlify/shared': 9.1.10 + + /@intlify/message-compiler/9.1.10: + resolution: {integrity: sha512-+JiJpXff/XTb0EadYwdxOyRTB0hXNd4n1HaJ/a4yuV960uRmPXaklJsedW0LNdcptd/hYUZtCkI7Lc9J5C1gxg==} + engines: {node: '>= 10'} + dependencies: + '@intlify/message-resolver': 9.1.10 + '@intlify/shared': 9.1.10 + source-map: 0.6.1 + + /@intlify/message-compiler/9.3.0-beta.1: + resolution: {integrity: sha512-XHjwJB7qJciYA3T19ehBFpcmC1z+R4sMS43fEp30CLOOFLsrB0xuk0V2XeOFsHovaQ2LsK5x0qk+5+Dy6Hs7fw==} + engines: {node: '>= 14'} + dependencies: + '@intlify/shared': 9.3.0-beta.1 + source-map: 0.6.1 + dev: true + + /@intlify/message-resolver/9.1.10: + resolution: {integrity: sha512-5YixMG/M05m0cn9+gOzd4EZQTFRUu8RGhzxJbR1DWN21x/Z3bJ8QpDYj6hC4FwBj5uKsRfKpJQ3Xqg98KWoA+w==} + engines: {node: '>= 10'} + + /@intlify/runtime/9.1.10: + resolution: {integrity: sha512-7QsuByNzpe3Gfmhwq6hzgXcMPpxz8Zxb/XFI6s9lQdPLPe5Lgw4U1ovRPZTOs6Y2hwitR3j/HD8BJNGWpJnOFA==} + engines: {node: '>= 10'} + dependencies: + '@intlify/message-compiler': 9.1.10 + '@intlify/message-resolver': 9.1.10 + '@intlify/shared': 9.1.10 + + /@intlify/shared/9.1.10: + resolution: {integrity: 
sha512-Om54xJeo1Vw+K1+wHYyXngE8cAbrxZHpWjYzMR9wCkqbhGtRV5VLhVc214Ze2YatPrWlS2WSMOWXR8JktX/IgA==} + engines: {node: '>= 10'} + + /@intlify/shared/9.3.0-beta.1: + resolution: {integrity: sha512-clf9EF4lY0sANjHlEndwfsR2hvYuq0TElq+gO/1xqH3FMGJwv+6lxJPOtoF4r2IE5RV3qX6YyZejZgdfbq2Yfg==} + engines: {node: '>= 14'} + dev: true + + /@intlify/vite-plugin-vue-i18n/5.0.0_vite@3.0.0+vue-i18n@9.1.10: + resolution: {integrity: sha512-49W7y2b0m6Cg6qGoBkjdNgxyzFx3iOSbnxvDaWcN65raaceJVuwCwxXX1SqJbjHTg32rpTFi4jSlroqAV9Rr0w==} + engines: {node: '>= 14.6'} + peerDependencies: + petite-vue-i18n: '*' + vite: ^2.9.0 || ^3.0.0 + vue-i18n: '*' + peerDependenciesMeta: + petite-vue-i18n: + optional: true + vite: + optional: true + vue-i18n: + optional: true + dependencies: + '@intlify/bundle-utils': 3.1.0_vue-i18n@9.1.10 + '@intlify/shared': 9.3.0-beta.1 + '@rollup/pluginutils': 4.2.1 + debug: 4.3.4 + fast-glob: 3.2.11 + source-map: 0.6.1 + vite: 3.0.0 + vue-i18n: 9.1.10_vue@3.2.37 + transitivePeerDependencies: + - supports-color + dev: true + + /@intlify/vue-devtools/9.1.10: + resolution: {integrity: sha512-5l3qYARVbkWAkagLu1XbDUWRJSL8br1Dj60wgMaKB0+HswVsrR6LloYZTg7ozyvM621V6+zsmwzbQxbVQyrytQ==} + engines: {node: '>= 10'} + dependencies: + '@intlify/message-resolver': 9.1.10 + '@intlify/runtime': 9.1.10 + '@intlify/shared': 9.1.10 + + /@jridgewell/gen-mapping/0.1.1: + resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@jridgewell/gen-mapping/0.3.2: + resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + '@jridgewell/trace-mapping': 0.3.14 + dev: true + + /@jridgewell/resolve-uri/3.1.0: + resolution: 
{integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/set-array/1.1.2: + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/source-map/0.3.2: + resolution: {integrity: sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==} + dependencies: + '@jridgewell/gen-mapping': 0.3.2 + '@jridgewell/trace-mapping': 0.3.14 + dev: true + + /@jridgewell/sourcemap-codec/1.4.14: + resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} + dev: true + + /@jridgewell/trace-mapping/0.3.14: + resolution: {integrity: sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@nodelib/fs.scandir/2.1.5: + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + /@nodelib/fs.stat/2.0.5: + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + /@nodelib/fs.walk/1.2.8: + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.13.0 + + /@rollup/plugin-babel/5.3.1_3crms4j33zkfeqv7ozcuia3hfq: + resolution: {integrity: sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q==} + engines: {node: '>= 10.0.0'} + peerDependencies: + '@babel/core': ^7.0.0 + '@types/babel__core': 
^7.1.9 + rollup: ^1.20.0||^2.0.0 + peerDependenciesMeta: + '@types/babel__core': + optional: true + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@rollup/pluginutils': 3.1.0_rollup@2.77.0 + rollup: 2.77.0 + dev: true + + /@rollup/plugin-node-resolve/11.2.1_rollup@2.77.0: + resolution: {integrity: sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg==} + engines: {node: '>= 10.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0 + dependencies: + '@rollup/pluginutils': 3.1.0_rollup@2.77.0 + '@types/resolve': 1.17.1 + builtin-modules: 3.3.0 + deepmerge: 4.2.2 + is-module: 1.0.0 + resolve: 1.22.1 + rollup: 2.77.0 + dev: true + + /@rollup/plugin-replace/2.4.2_rollup@2.77.0: + resolution: {integrity: sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg==} + peerDependencies: + rollup: ^1.20.0 || ^2.0.0 + dependencies: + '@rollup/pluginutils': 3.1.0_rollup@2.77.0 + magic-string: 0.25.9 + rollup: 2.77.0 + dev: true + + /@rollup/pluginutils/3.1.0_rollup@2.77.0: + resolution: {integrity: sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==} + engines: {node: '>= 8.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0 + dependencies: + '@types/estree': 0.0.39 + estree-walker: 1.0.1 + picomatch: 2.3.1 + rollup: 2.77.0 + dev: true + + /@rollup/pluginutils/4.2.1: + resolution: {integrity: sha512-iKnFXr7NkdZAIHiIWE+BX5ULi/ucVFYWD6TbAV+rZctiRTY2PL6tsIKhoIOaoskiWAkgu+VsbXgUVDNLHf+InQ==} + engines: {node: '>= 8.0.0'} + dependencies: + estree-walker: 2.0.2 + picomatch: 2.3.1 + dev: true + + /@surma/rollup-plugin-off-main-thread/2.2.3: + resolution: {integrity: sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ==} + dependencies: + ejs: 3.1.8 + json5: 2.2.1 + magic-string: 0.25.9 + string.prototype.matchall: 4.0.7 + dev: true + + /@tailwindcss/aspect-ratio/0.4.0_tailwindcss@3.1.6: + 
resolution: {integrity: sha512-WJu0I4PpqNPuutpaA9zDUq2JXR+lorZ7PbLcKNLmb6GL9/HLfC7w3CRsMhJF4BbYd/lkY6CfXOvkYpuGnZfkpQ==} + peerDependencies: + tailwindcss: '>=2.0.0 || >=3.0.0 || >=3.0.0-alpha.1' + dependencies: + tailwindcss: 3.1.6_postcss@8.4.14 + dev: false + + /@tailwindcss/forms/0.5.2_tailwindcss@3.1.6: + resolution: {integrity: sha512-pSrFeJB6Bg1Mrg9CdQW3+hqZXAKsBrSG9MAfFLKy1pVA4Mb4W7C0k7mEhlmS2Dfo/otxrQOET7NJiJ9RrS563w==} + peerDependencies: + tailwindcss: '>=3.0.0 || >= 3.0.0-alpha.1' + dependencies: + mini-svg-data-uri: 1.4.4 + tailwindcss: 3.1.6_postcss@8.4.14 + dev: false + + /@tailwindcss/typography/0.5.4_tailwindcss@3.1.6: + resolution: {integrity: sha512-QEdg40EmGvE7kKoDei8zr5sf4D1pIayHj4R31bH3lX8x2BtTiR+jNejYPOkhbmy3DXgkMF9jC8xqNiGFAuL9Sg==} + peerDependencies: + tailwindcss: '>=3.0.0 || insiders' + dependencies: + lodash.castarray: 4.4.0 + lodash.isplainobject: 4.0.6 + lodash.merge: 4.6.2 + tailwindcss: 3.1.6_postcss@8.4.14 + dev: false + + /@tootallnate/once/2.0.0: + resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} + engines: {node: '>= 10'} + dev: true + + /@types/chai-subset/1.3.3: + resolution: {integrity: sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==} + dependencies: + '@types/chai': 4.3.1 + dev: true + + /@types/chai/4.3.1: + resolution: {integrity: sha512-/zPMqDkzSZ8t3VtxOa4KPq7uzzW978M9Tvh+j7GHKuo6k6GTLxPJ4J5gE5cjfJ26pnXst0N5Hax8Sr0T2Mi9zQ==} + dev: true + + /@types/estree/0.0.39: + resolution: {integrity: sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==} + dev: true + + /@types/node/18.0.4: + resolution: {integrity: sha512-M0+G6V0Y4YV8cqzHssZpaNCqvYwlCiulmm0PwpNLF55r/+cT8Ol42CHRU1SEaYFH2rTwiiE1aYg/2g2rrtGdPA==} + + /@types/resolve/1.17.1: + resolution: {integrity: sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw==} + dependencies: 
+ '@types/node': 18.0.4 + dev: true + + /@types/trusted-types/2.0.2: + resolution: {integrity: sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg==} + + /@types/web-bluetooth/0.0.14: + resolution: {integrity: sha512-5d2RhCard1nQUC3aHcq/gHzWYO6K0WJmAbjO7mQJgCQKtZpgXxv1rOM6O/dBDhDYYVutk1sciOgNSe+5YyfM8A==} + + /@vitejs/plugin-vue/3.0.0_vite@3.0.0+vue@3.2.37: + resolution: {integrity: sha512-yWP34ArFh/jAeNUDkkLz/kVRLjf5ppJiq4L36f64Cp6dIrMQeYZGDP9xxdemlXfZR9ylN9JgHUl3GzfqOtgYDg==} + engines: {node: '>=14.18.0'} + peerDependencies: + vite: ^3.0.0 + vue: ^3.2.25 + dependencies: + vite: 3.0.0 + vue: 3.2.37 + dev: true + + /@volar/code-gen/0.38.5: + resolution: {integrity: sha512-GRGhPKoNtRwZyn9M0b2buobeMR1Aj9zxZI0osanLG9vB9YCnJov1myxKU8EJV5NobpyspLIv1X6/BEHLZNsKig==} + dependencies: + '@volar/source-map': 0.38.5 + dev: true + + /@volar/source-map/0.38.5: + resolution: {integrity: sha512-TyTLkOtAW/7qnl4Gabt4W4vcKPBPCBdaPLKwkMglKcaX70lPH2CIwZcPMJo6PAilbUVXcuX86xfgdncWDKKaZQ==} + dev: true + + /@volar/vue-code-gen/0.38.5: + resolution: {integrity: sha512-4t2bX2bCmmlyYwPLqfH3AJXj9Km79uRmCy81recc8LB8ZT2Z9hOFNBAnlGNcCeZAtVWtmHVV7sXJtQAJQxkWeg==} + dependencies: + '@volar/code-gen': 0.38.5 + '@volar/source-map': 0.38.5 + '@vue/compiler-core': 3.2.37 + '@vue/compiler-dom': 3.2.37 + '@vue/shared': 3.2.37 + dev: true + + /@volar/vue-typescript/0.38.5: + resolution: {integrity: sha512-Gobtg5gxR3bf/l1h300OWCWkvDQnOINgnxkDYiF8qdTBalW6qGsgGmq0uLBBDLaoahrfM7rqNd5QfJwGBgFXZg==} + dependencies: + '@volar/code-gen': 0.38.5 + '@volar/source-map': 0.38.5 + '@volar/vue-code-gen': 0.38.5 + '@vue/compiler-sfc': 3.2.37 + '@vue/reactivity': 3.2.37 + dev: true + + /@vue/compiler-core/3.2.37: + resolution: {integrity: sha512-81KhEjo7YAOh0vQJoSmAD68wLfYqJvoiD4ulyedzF+OEk/bk6/hx3fTNVfuzugIIaTrOx4PGx6pAiBRe5e9Zmg==} + dependencies: + '@babel/parser': 7.18.8 + '@vue/shared': 3.2.37 + estree-walker: 2.0.2 + source-map: 0.6.1 + + /@vue/compiler-dom/3.2.37: + 
resolution: {integrity: sha512-yxJLH167fucHKxaqXpYk7x8z7mMEnXOw3G2q62FTkmsvNxu4FQSu5+3UMb+L7fjKa26DEzhrmCxAgFLLIzVfqQ==} + dependencies: + '@vue/compiler-core': 3.2.37 + '@vue/shared': 3.2.37 + + /@vue/compiler-sfc/3.2.37: + resolution: {integrity: sha512-+7i/2+9LYlpqDv+KTtWhOZH+pa8/HnX/905MdVmAcI/mPQOBwkHHIzrsEsucyOIZQYMkXUiTkmZq5am/NyXKkg==} + dependencies: + '@babel/parser': 7.18.8 + '@vue/compiler-core': 3.2.37 + '@vue/compiler-dom': 3.2.37 + '@vue/compiler-ssr': 3.2.37 + '@vue/reactivity-transform': 3.2.37 + '@vue/shared': 3.2.37 + estree-walker: 2.0.2 + magic-string: 0.25.9 + postcss: 8.4.14 + source-map: 0.6.1 + + /@vue/compiler-ssr/3.2.37: + resolution: {integrity: sha512-7mQJD7HdXxQjktmsWp/J67lThEIcxLemz1Vb5I6rYJHR5vI+lON3nPGOH3ubmbvYGt8xEUaAr1j7/tIFWiEOqw==} + dependencies: + '@vue/compiler-dom': 3.2.37 + '@vue/shared': 3.2.37 + + /@vue/devtools-api/6.2.1: + resolution: {integrity: sha512-OEgAMeQXvCoJ+1x8WyQuVZzFo0wcyCmUR3baRVLmKBo1LmYZWMlRiXlux5jd0fqVJu6PfDbOrZItVqUEzLobeQ==} + + /@vue/reactivity-transform/3.2.37: + resolution: {integrity: sha512-IWopkKEb+8qpu/1eMKVeXrK0NLw9HicGviJzhJDEyfxTR9e1WtpnnbYkJWurX6WwoFP0sz10xQg8yL8lgskAZg==} + dependencies: + '@babel/parser': 7.18.8 + '@vue/compiler-core': 3.2.37 + '@vue/shared': 3.2.37 + estree-walker: 2.0.2 + magic-string: 0.25.9 + + /@vue/reactivity/3.2.37: + resolution: {integrity: sha512-/7WRafBOshOc6m3F7plwzPeCu/RCVv9uMpOwa/5PiY1Zz+WLVRWiy0MYKwmg19KBdGtFWsmZ4cD+LOdVPcs52A==} + dependencies: + '@vue/shared': 3.2.37 + + /@vue/runtime-core/3.2.37: + resolution: {integrity: sha512-JPcd9kFyEdXLl/i0ClS7lwgcs0QpUAWj+SKX2ZC3ANKi1U4DOtiEr6cRqFXsPwY5u1L9fAjkinIdB8Rz3FoYNQ==} + dependencies: + '@vue/reactivity': 3.2.37 + '@vue/shared': 3.2.37 + + /@vue/runtime-dom/3.2.37: + resolution: {integrity: sha512-HimKdh9BepShW6YozwRKAYjYQWg9mQn63RGEiSswMbW+ssIht1MILYlVGkAGGQbkhSh31PCdoUcfiu4apXJoPw==} + dependencies: + '@vue/runtime-core': 3.2.37 + '@vue/shared': 3.2.37 + csstype: 2.6.20 + + 
/@vue/server-renderer/3.2.37_vue@3.2.37: + resolution: {integrity: sha512-kLITEJvaYgZQ2h47hIzPh2K3jG8c1zCVbp/o/bzQOyvzaKiCquKS7AaioPI28GNxIsE/zSx+EwWYsNxDCX95MA==} + peerDependencies: + vue: 3.2.37 + dependencies: + '@vue/compiler-ssr': 3.2.37 + '@vue/shared': 3.2.37 + vue: 3.2.37 + + /@vue/shared/3.2.37: + resolution: {integrity: sha512-4rSJemR2NQIo9Klm1vabqWjD8rs/ZaJSzMxkMNeJS6lHiUjjUeYFbooN19NgFjztubEKh3WlZUeOLVdbbUWHsw==} + + /@vueuse/components/8.9.3_vue@3.2.37: + resolution: {integrity: sha512-7A97cUdJxwAESo1dJvIzxGW7Z8n5LGrLPOrQ9qgNGUKZlwVgBHJNiQ5KMddDDoqSwTVrLGspc1p8q8/+tYpHKA==} + dependencies: + '@vueuse/core': 8.9.3_vue@3.2.37 + '@vueuse/shared': 8.9.3_vue@3.2.37 + vue-demi: 0.13.4_vue@3.2.37 + transitivePeerDependencies: + - '@vue/composition-api' + - vue + dev: false + + /@vueuse/core/8.9.3_vue@3.2.37: + resolution: {integrity: sha512-q2pr3N7FPG7IBBhEXTYOJU+38VwKMLP5IfD33byzBV4Th7f1JHT4qPKvJrvr17knAefPRzNqgt9et+xFqaRlPQ==} + peerDependencies: + '@vue/composition-api': ^1.1.0 + vue: ^2.6.0 || ^3.2.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + vue: + optional: true + dependencies: + '@types/web-bluetooth': 0.0.14 + '@vueuse/metadata': 8.9.3 + '@vueuse/shared': 8.9.3_vue@3.2.37 + vue: 3.2.37 + vue-demi: 0.13.4_vue@3.2.37 + + /@vueuse/head/0.7.6_vue@3.2.37: + resolution: {integrity: sha512-cOWqCkT3WiF5oEpw+VVEWUJd9RLD5rc7DmnFp3cePsejp+t7686uKD9Z9ZU7Twb7R/BI8iexKTmXo9D/F3v6UA==} + peerDependencies: + vue: '>=3' + dependencies: + vue: 3.2.37 + + /@vueuse/metadata/8.9.3: + resolution: {integrity: sha512-57gZZKtWAmcJaUBmciCohvmumVLz4+FnoVnWj7U5BWs5PC2/7gU9Z0/i1i9leDNeboAauFzAq7z1GjS8eYnT+w==} + + /@vueuse/shared/8.9.3_vue@3.2.37: + resolution: {integrity: sha512-foorYQAU3CGknAO1w9No/rpGBJmb7L74MPltnZAYxeBRfhsajjJYYgja+D5IT2vT+/a0NciISaVp3fDwMN1ocA==} + peerDependencies: + '@vue/composition-api': ^1.1.0 + vue: ^2.6.0 || ^3.2.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + vue: + optional: true + dependencies: + 
vue: 3.2.37 + vue-demi: 0.13.4_vue@3.2.37 + + /abab/2.0.6: + resolution: {integrity: sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==} + dev: true + + /accepts/1.3.8: + resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} + engines: {node: '>= 0.6'} + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 + dev: true + + /acorn-globals/6.0.0: + resolution: {integrity: sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==} + dependencies: + acorn: 7.4.1 + acorn-walk: 7.2.0 + dev: true + + /acorn-jsx/5.3.2_acorn@7.4.1: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + acorn: 7.4.1 + dev: true + + /acorn-node/1.8.2: + resolution: {integrity: sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==} + dependencies: + acorn: 7.4.1 + acorn-walk: 7.2.0 + xtend: 4.0.2 + dev: false + + /acorn-walk/7.2.0: + resolution: {integrity: sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==} + engines: {node: '>=0.4.0'} + + /acorn/7.4.1: + resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} + engines: {node: '>=0.4.0'} + hasBin: true + + /acorn/8.7.1: + resolution: {integrity: sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /agent-base/6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /ajv/8.11.0: + resolution: {integrity: 
sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==} + dependencies: + fast-deep-equal: 3.1.3 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + uri-js: 4.4.1 + dev: true + + /ansi-regex/5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + dev: true + + /ansi-styles/3.2.1: + resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} + engines: {node: '>=4'} + dependencies: + color-convert: 1.9.3 + dev: true + + /ansi-styles/4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + dependencies: + color-convert: 2.0.1 + dev: true + + /anymatch/3.1.2: + resolution: {integrity: sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==} + engines: {node: '>= 8'} + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + /appdata-path/1.0.0: + resolution: {integrity: sha512-ZbH3ezXfnT/YE3NdqduIt4lBV+H0ybvA2Qx3K76gIjQvh8gROpDFdDLpx6B1QJtW7zxisCbpTlCLhKqoR8cDBw==} + dev: true + + /arg/5.0.2: + resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} + dev: false + + /array-flatten/1.1.1: + resolution: {integrity: sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=} + dev: true + + /assertion-error/1.1.0: + resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} + dev: true + + /async/3.2.4: + resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} + dev: true + + /asynckit/0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: true + + /at-least-node/1.0.0: + 
resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} + engines: {node: '>= 4.0.0'} + dev: true + + /autoprefixer/10.4.7_postcss@8.4.14: + resolution: {integrity: sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA==} + engines: {node: ^10 || ^12 || >=14} + hasBin: true + peerDependencies: + postcss: ^8.1.0 + dependencies: + browserslist: 4.21.2 + caniuse-lite: 1.0.30001366 + fraction.js: 4.2.0 + normalize-range: 0.1.2 + picocolors: 1.0.0 + postcss: 8.4.14 + postcss-value-parser: 4.2.0 + dev: false + + /babel-plugin-dynamic-import-node/2.3.3: + resolution: {integrity: sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==} + dependencies: + object.assign: 4.1.2 + dev: true + + /babel-plugin-polyfill-corejs2/0.3.1_@babel+core@7.18.6: + resolution: {integrity: sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.18.8 + '@babel/core': 7.18.6 + '@babel/helper-define-polyfill-provider': 0.3.1_@babel+core@7.18.6 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-polyfill-corejs3/0.5.2_@babel+core@7.18.6: + resolution: {integrity: sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + '@babel/helper-define-polyfill-provider': 0.3.1_@babel+core@7.18.6 + core-js-compat: 3.23.4 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-polyfill-regenerator/0.3.1_@babel+core@7.18.6: + resolution: {integrity: sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.18.6 + 
'@babel/helper-define-polyfill-provider': 0.3.1_@babel+core@7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-runtime/6.26.0: + resolution: {integrity: sha512-ITKNuq2wKlW1fJg9sSW52eepoYgZBggvOAHC0u/CYu/qxQ9EVzThCgR69BnSXLHjy2f7SY5zaQ4yt7H9ZVxY2g==} + dependencies: + core-js: 2.6.12 + regenerator-runtime: 0.11.1 + dev: false + + /balanced-match/1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: true + + /binary-extensions/2.2.0: + resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} + engines: {node: '>=8'} + + /body-parser/1.20.0: + resolution: {integrity: sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==} + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + dependencies: + bytes: 3.1.2 + content-type: 1.0.4 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.10.3 + raw-body: 2.5.1 + type-is: 1.6.18 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /boolbase/1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + dev: true + + /brace-expansion/1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /brace-expansion/2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + dependencies: + balanced-match: 1.0.2 + dev: true + + /braces/3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} + dependencies: + fill-range: 7.0.1 + + 
/browser-process-hrtime/1.0.0: + resolution: {integrity: sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==} + dev: true + + /browserslist/4.21.2: + resolution: {integrity: sha512-MonuOgAtUB46uP5CezYbRaYKBNt2LxP0yX+Pmj4LkcDFGkn9Cbpi83d9sCjwQDErXsIJSzY5oKGDbgOlF/LPAA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + dependencies: + caniuse-lite: 1.0.30001366 + electron-to-chromium: 1.4.191 + node-releases: 2.0.6 + update-browserslist-db: 1.0.4_browserslist@4.21.2 + + /buffer-from/1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + dev: true + + /builtin-modules/3.3.0: + resolution: {integrity: sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==} + engines: {node: '>=6'} + dev: true + + /bytes/3.0.0: + resolution: {integrity: sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=} + engines: {node: '>= 0.8'} + dev: true + + /bytes/3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + dev: true + + /call-bind/1.0.2: + resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.1.2 + dev: true + + /camel-case/3.0.0: + resolution: {integrity: sha512-+MbKztAYHXPr1jNTSKQF52VpcFjwY5RkR7fxksV8Doo4KAYc5Fl4UJRgthBbTmEx8C54DqahhbLJkDwjI3PI/w==} + dependencies: + no-case: 2.3.2 + upper-case: 1.1.3 + dev: true + + /camelcase-css/2.0.1: + resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} + engines: {node: '>= 6'} + dev: false + + /caniuse-lite/1.0.30001366: + resolution: {integrity: sha512-yy7XLWCubDobokgzudpkKux8e0UOOnLHE6mlNJBzT3lZJz6s5atSEzjoL+fsCPkI0G8MP5uVdDx1ur/fXEWkZA==} + + /chai/4.3.6: + 
resolution: {integrity: sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==} + engines: {node: '>=4'} + dependencies: + assertion-error: 1.1.0 + check-error: 1.0.2 + deep-eql: 3.0.1 + get-func-name: 2.0.0 + loupe: 2.3.4 + pathval: 1.1.1 + type-detect: 4.0.8 + dev: true + + /chalk/2.4.2: + resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} + engines: {node: '>=4'} + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + dev: true + + /chalk/4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + dev: true + + /check-error/1.0.2: + resolution: {integrity: sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==} + dev: true + + /chokidar/3.5.3: + resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} + engines: {node: '>= 8.10.0'} + dependencies: + anymatch: 3.1.2 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.2 + + /clean-css/4.2.4: + resolution: {integrity: sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A==} + engines: {node: '>= 4.0'} + dependencies: + source-map: 0.6.1 + dev: true + + /cliui/7.0.4: + resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: true + + /color-convert/1.9.3: + resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + dependencies: + color-name: 1.1.3 + dev: 
true + + /color-convert/2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + dependencies: + color-name: 1.1.4 + + /color-name/1.1.3: + resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + dev: true + + /color-name/1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + /color-string/1.9.1: + resolution: {integrity: sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==} + dependencies: + color-name: 1.1.4 + simple-swizzle: 0.2.2 + dev: false + + /color/4.2.3: + resolution: {integrity: sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==} + engines: {node: '>=12.5.0'} + dependencies: + color-convert: 2.0.1 + color-string: 1.9.1 + dev: false + + /combined-stream/1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: true + + /commander/2.20.3: + resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} + dev: true + + /common-tags/1.8.2: + resolution: {integrity: sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==} + engines: {node: '>=4.0.0'} + dev: true + + /compressible/2.0.18: + resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: true + + /compression/1.7.4: + resolution: {integrity: sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==} + engines: {node: '>= 0.8.0'} + dependencies: + accepts: 1.3.8 
+ bytes: 3.0.0 + compressible: 2.0.18 + debug: 2.6.9 + on-headers: 1.0.2 + safe-buffer: 5.1.2 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /concat-map/0.0.1: + resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=} + dev: true + + /content-disposition/0.5.4: + resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} + engines: {node: '>= 0.6'} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /content-type/1.0.4: + resolution: {integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==} + engines: {node: '>= 0.6'} + dev: true + + /convert-source-map/1.8.0: + resolution: {integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==} + dependencies: + safe-buffer: 5.1.2 + dev: true + + /cookie-signature/1.0.6: + resolution: {integrity: sha1-4wOogrNCzD7oylE6eZmXNNqzriw=} + dev: true + + /cookie/0.5.0: + resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} + engines: {node: '>= 0.6'} + dev: true + + /core-js-compat/3.23.4: + resolution: {integrity: sha512-RkSRPe+JYEoflcsuxJWaiMPhnZoFS51FcIxm53k4KzhISCBTmaGlto9dTIrYuk0hnJc3G6pKufAKepHnBq6B6Q==} + dependencies: + browserslist: 4.21.2 + semver: 7.0.0 + dev: true + + /core-js/2.6.12: + resolution: {integrity: sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==} + deprecated: core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js. 
+ requiresBuild: true + dev: false + + /core-util-is/1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + dev: true + + /cors/2.8.5: + resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==} + engines: {node: '>= 0.10'} + dependencies: + object-assign: 4.1.1 + vary: 1.1.2 + dev: true + + /critters/0.0.16: + resolution: {integrity: sha512-JwjgmO6i3y6RWtLYmXwO5jMd+maZt8Tnfu7VVISmEWyQqfLpB8soBswf8/2bu6SBXxtKA68Al3c+qIG1ApT68A==} + dependencies: + chalk: 4.1.2 + css-select: 4.3.0 + parse5: 6.0.1 + parse5-htmlparser2-tree-adapter: 6.0.1 + postcss: 8.4.14 + pretty-bytes: 5.6.0 + dev: true + + /cross-spawn/7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /crypto-random-string/2.0.0: + resolution: {integrity: sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==} + engines: {node: '>=8'} + dev: true + + /css-select/4.3.0: + resolution: {integrity: sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==} + dependencies: + boolbase: 1.0.0 + css-what: 6.1.0 + domhandler: 4.3.1 + domutils: 2.8.0 + nth-check: 2.1.1 + dev: true + + /css-selector-tokenizer/0.8.0: + resolution: {integrity: sha512-Jd6Ig3/pe62/qe5SBPTN8h8LeUg/pT4lLgtavPf7updwwHpvFzxvOQBHYj2LZDMjUnBzgvIUSjRcf6oT5HzHFg==} + dependencies: + cssesc: 3.0.0 + fastparse: 1.1.2 + dev: false + + /css-what/6.1.0: + resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} + engines: {node: '>= 6'} + dev: true + + /cssesc/3.0.0: + resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} 
+ engines: {node: '>=4'} + hasBin: true + dev: false + + /cssom/0.3.8: + resolution: {integrity: sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==} + dev: true + + /cssom/0.5.0: + resolution: {integrity: sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==} + dev: true + + /cssstyle/2.3.0: + resolution: {integrity: sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==} + engines: {node: '>=8'} + dependencies: + cssom: 0.3.8 + dev: true + + /csstype/2.6.20: + resolution: {integrity: sha512-/WwNkdXfckNgw6S5R125rrW8ez139lBHWouiBvX8dfMFtcn6V81REDqnH7+CRpRipfYlyU1CmOnOxrmGcFOjeA==} + + /daisyui/2.24.0_ugi4xkrfysqkt4c4y6hkyfj344: + resolution: {integrity: sha512-Fdu/4LCdTfWLWAbCuPxvnaRotEfJ+hVPgZ2kv/aUk9RZ00Yk8fGdJtIf0kXJ3IgUKOr8rCXUpfQY6DQU9usPCQ==} + peerDependencies: + autoprefixer: ^10.0.2 + postcss: ^8.1.6 + dependencies: + autoprefixer: 10.4.7_postcss@8.4.14 + color: 4.2.3 + css-selector-tokenizer: 0.8.0 + postcss: 8.4.14 + postcss-js: 4.0.0_postcss@8.4.14 + tailwindcss: 3.1.6_postcss@8.4.14 + transitivePeerDependencies: + - ts-node + dev: false + + /data-urls/3.0.2: + resolution: {integrity: sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==} + engines: {node: '>=12'} + dependencies: + abab: 2.0.6 + whatwg-mimetype: 3.0.0 + whatwg-url: 11.0.0 + dev: true + + /debug/2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.0.0 + dev: true + + /debug/4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: 
true + dependencies: + ms: 2.1.2 + dev: true + + /decimal.js/10.3.1: + resolution: {integrity: sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==} + dev: true + + /deep-eql/3.0.1: + resolution: {integrity: sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==} + engines: {node: '>=0.12'} + dependencies: + type-detect: 4.0.8 + dev: true + + /deep-is/0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + dev: true + + /deepmerge/4.2.2: + resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} + engines: {node: '>=0.10.0'} + dev: true + + /define-properties/1.1.4: + resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} + engines: {node: '>= 0.4'} + dependencies: + has-property-descriptors: 1.0.0 + object-keys: 1.1.1 + dev: true + + /defined/1.0.0: + resolution: {integrity: sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ==} + dev: false + + /delayed-stream/1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: true + + /depd/2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + dev: true + + /destroy/1.2.0: + resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + dev: true + + /detect-node/2.1.0: + resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==} + dev: true + + /detective/5.2.1: + resolution: {integrity: 
sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw==} + engines: {node: '>=0.8.0'} + hasBin: true + dependencies: + acorn-node: 1.8.2 + defined: 1.0.0 + minimist: 1.2.6 + dev: false + + /didyoumean/1.2.2: + resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==} + dev: false + + /dlv/1.1.3: + resolution: {integrity: sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==} + dev: false + + /dom-serializer/1.4.1: + resolution: {integrity: sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==} + dependencies: + domelementtype: 2.3.0 + domhandler: 4.3.1 + entities: 2.2.0 + dev: true + + /domelementtype/2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + dev: true + + /domexception/4.0.0: + resolution: {integrity: sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==} + engines: {node: '>=12'} + dependencies: + webidl-conversions: 7.0.0 + dev: true + + /domhandler/4.3.1: + resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==} + engines: {node: '>= 4'} + dependencies: + domelementtype: 2.3.0 + dev: true + + /domutils/2.8.0: + resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==} + dependencies: + dom-serializer: 1.4.1 + domelementtype: 2.3.0 + domhandler: 4.3.1 + dev: true + + /ee-first/1.1.1: + resolution: {integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=} + dev: true + + /ejs/3.1.8: + resolution: {integrity: sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ==} + engines: {node: '>=0.10.0'} + hasBin: true + dependencies: + jake: 10.8.5 + dev: true + + /electron-to-chromium/1.4.191: + 
resolution: {integrity: sha512-MeEaiuoSFh4G+rrN+Ilm1KJr8pTTZloeLurcZ+PRcthvdK1gWThje+E6baL7/7LoNctrzCncavAG/j/vpES9jg==} + + /emoji-regex/8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + dev: true + + /encodeurl/1.0.2: + resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} + engines: {node: '>= 0.8'} + dev: true + + /entities/2.2.0: + resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} + dev: true + + /entities/4.3.1: + resolution: {integrity: sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==} + engines: {node: '>=0.12'} + dev: true + + /es-abstract/1.20.1: + resolution: {integrity: sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + es-to-primitive: 1.2.1 + function-bind: 1.1.1 + function.prototype.name: 1.1.5 + get-intrinsic: 1.1.2 + get-symbol-description: 1.0.0 + has: 1.0.3 + has-property-descriptors: 1.0.0 + has-symbols: 1.0.3 + internal-slot: 1.0.3 + is-callable: 1.2.4 + is-negative-zero: 2.0.2 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.2 + is-string: 1.0.7 + is-weakref: 1.0.2 + object-inspect: 1.12.2 + object-keys: 1.1.1 + object.assign: 4.1.2 + regexp.prototype.flags: 1.4.3 + string.prototype.trimend: 1.0.5 + string.prototype.trimstart: 1.0.5 + unbox-primitive: 1.0.2 + dev: true + + /es-to-primitive/1.2.1: + resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} + engines: {node: '>= 0.4'} + dependencies: + is-callable: 1.2.4 + is-date-object: 1.0.5 + is-symbol: 1.0.4 + dev: true + + /esbuild-android-64/0.14.49: + resolution: {integrity: 
sha512-vYsdOTD+yi+kquhBiFWl3tyxnj2qZJsl4tAqwhT90ktUdnyTizgle7TjNx6Ar1bN7wcwWqZ9QInfdk2WVagSww==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /esbuild-android-arm64/0.14.49: + resolution: {integrity: sha512-g2HGr/hjOXCgSsvQZ1nK4nW/ei8JUx04Li74qub9qWrStlysaVmadRyTVuW32FGIpLQyc5sUjjZopj49eGGM2g==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /esbuild-darwin-64/0.14.49: + resolution: {integrity: sha512-3rvqnBCtX9ywso5fCHixt2GBCUsogNp9DjGmvbBohh31Ces34BVzFltMSxJpacNki96+WIcX5s/vum+ckXiLYg==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /esbuild-darwin-arm64/0.14.49: + resolution: {integrity: sha512-XMaqDxO846srnGlUSJnwbijV29MTKUATmOLyQSfswbK/2X5Uv28M9tTLUJcKKxzoo9lnkYPsx2o8EJcTYwCs/A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /esbuild-freebsd-64/0.14.49: + resolution: {integrity: sha512-NJ5Q6AjV879mOHFri+5lZLTp5XsO2hQ+KSJYLbfY9DgCu8s6/Zl2prWXVANYTeCDLlrIlNNYw8y34xqyLDKOmQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-freebsd-arm64/0.14.49: + resolution: {integrity: sha512-lFLtgXnAc3eXYqj5koPlBZvEbBSOSUbWO3gyY/0+4lBdRqELyz4bAuamHvmvHW5swJYL7kngzIZw6kdu25KGOA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-32/0.14.49: + resolution: {integrity: sha512-zTTH4gr2Kb8u4QcOpTDVn7Z8q7QEIvFl/+vHrI3cF6XOJS7iEI1FWslTo3uofB2+mn6sIJEQD9PrNZKoAAMDiA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-64/0.14.49: + resolution: {integrity: sha512-hYmzRIDzFfLrB5c1SknkxzM8LdEUOusp6M2TnuQZJLRtxTgyPnZZVtyMeCLki0wKgYPXkFsAVhi8vzo2mBNeTg==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + 
requiresBuild: true + dev: true + optional: true + + /esbuild-linux-arm/0.14.49: + resolution: {integrity: sha512-iE3e+ZVv1Qz1Sy0gifIsarJMQ89Rpm9mtLSRtG3AH0FPgAzQ5Z5oU6vYzhc/3gSPi2UxdCOfRhw2onXuFw/0lg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-arm64/0.14.49: + resolution: {integrity: sha512-KLQ+WpeuY+7bxukxLz5VgkAAVQxUv67Ft4DmHIPIW+2w3ObBPQhqNoeQUHxopoW/aiOn3m99NSmSV+bs4BSsdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-mips64le/0.14.49: + resolution: {integrity: sha512-n+rGODfm8RSum5pFIqFQVQpYBw+AztL8s6o9kfx7tjfK0yIGF6tm5HlG6aRjodiiKkH2xAiIM+U4xtQVZYU4rA==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-ppc64le/0.14.49: + resolution: {integrity: sha512-WP9zR4HX6iCBmMFH+XHHng2LmdoIeUmBpL4aL2TR8ruzXyT4dWrJ5BSbT8iNo6THN8lod6GOmYDLq/dgZLalGw==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-riscv64/0.14.49: + resolution: {integrity: sha512-h66ORBz+Dg+1KgLvzTVQEA1LX4XBd1SK0Fgbhhw4akpG/YkN8pS6OzYI/7SGENiN6ao5hETRDSkVcvU9NRtkMQ==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-s390x/0.14.49: + resolution: {integrity: sha512-DhrUoFVWD+XmKO1y7e4kNCqQHPs6twz6VV6Uezl/XHYGzM60rBewBF5jlZjG0nCk5W/Xy6y1xWeopkrhFFM0sQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-netbsd-64/0.14.49: + resolution: {integrity: sha512-BXaUwFOfCy2T+hABtiPUIpWjAeWK9P8O41gR4Pg73hpzoygVGnj0nI3YK4SJhe52ELgtdgWP/ckIkbn2XaTxjQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-openbsd-64/0.14.49: + resolution: {integrity: 
sha512-lP06UQeLDGmVPw9Rg437Btu6J9/BmyhdoefnQ4gDEJTtJvKtQaUcOQrhjTq455ouZN4EHFH1h28WOJVANK41kA==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-sunos-64/0.14.49: + resolution: {integrity: sha512-4c8Zowp+V3zIWje329BeLbGh6XI9c/rqARNaj5yPHdC61pHI9UNdDxT3rePPJeWcEZVKjkiAS6AP6kiITp7FSw==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-32/0.14.49: + resolution: {integrity: sha512-q7Rb+J9yHTeKr9QTPDYkqfkEj8/kcKz9lOabDuvEXpXuIcosWCJgo5Z7h/L4r7rbtTH4a8U2FGKb6s1eeOHmJA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-64/0.14.49: + resolution: {integrity: sha512-+Cme7Ongv0UIUTniPqfTX6mJ8Deo7VXw9xN0yJEN1lQMHDppTNmKwAM3oGbD/Vqff+07K2gN0WfNkMohmG+dVw==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-arm64/0.14.49: + resolution: {integrity: sha512-v+HYNAXzuANrCbbLFJ5nmO3m5y2PGZWLe3uloAkLt87aXiO2mZr3BTmacZdjwNkNEHuH3bNtN8cak+mzVjVPfA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild/0.14.49: + resolution: {integrity: sha512-/TlVHhOaq7Yz8N1OJrjqM3Auzo5wjvHFLk+T8pIue+fhnhIMpfAzsG6PLVMbFveVxqD2WOp3QHei+52IMUNmCw==} + engines: {node: '>=12'} + hasBin: true + requiresBuild: true + optionalDependencies: + esbuild-android-64: 0.14.49 + esbuild-android-arm64: 0.14.49 + esbuild-darwin-64: 0.14.49 + esbuild-darwin-arm64: 0.14.49 + esbuild-freebsd-64: 0.14.49 + esbuild-freebsd-arm64: 0.14.49 + esbuild-linux-32: 0.14.49 + esbuild-linux-64: 0.14.49 + esbuild-linux-arm: 0.14.49 + esbuild-linux-arm64: 0.14.49 + esbuild-linux-mips64le: 0.14.49 + esbuild-linux-ppc64le: 0.14.49 + esbuild-linux-riscv64: 0.14.49 + esbuild-linux-s390x: 0.14.49 + esbuild-netbsd-64: 0.14.49 + esbuild-openbsd-64: 0.14.49 + esbuild-sunos-64: 0.14.49 
+ esbuild-windows-32: 0.14.49 + esbuild-windows-64: 0.14.49 + esbuild-windows-arm64: 0.14.49 + dev: true + + /escalade/3.1.1: + resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} + engines: {node: '>=6'} + + /escape-html/1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + dev: true + + /escape-string-regexp/1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + dev: true + + /escape-string-regexp/5.0.0: + resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} + engines: {node: '>=12'} + dev: true + + /escodegen/2.0.0: + resolution: {integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==} + engines: {node: '>=6.0'} + hasBin: true + dependencies: + esprima: 4.0.1 + estraverse: 5.3.0 + esutils: 2.0.3 + optionator: 0.8.3 + optionalDependencies: + source-map: 0.6.1 + dev: true + + /eslint-utils/2.1.0: + resolution: {integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==} + engines: {node: '>=6'} + dependencies: + eslint-visitor-keys: 1.3.0 + dev: true + + /eslint-visitor-keys/1.3.0: + resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==} + engines: {node: '>=4'} + dev: true + + /espree/6.2.1: + resolution: {integrity: sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==} + engines: {node: '>=6.0.0'} + dependencies: + acorn: 7.4.1 + acorn-jsx: 5.3.2_acorn@7.4.1 + eslint-visitor-keys: 1.3.0 + dev: true + + /esprima/4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + 
engines: {node: '>=4'} + hasBin: true + dev: true + + /estraverse/5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + dev: true + + /estree-walker/1.0.1: + resolution: {integrity: sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==} + dev: true + + /estree-walker/2.0.2: + resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + + /esutils/2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + dev: true + + /etag/1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + dev: true + + /eventemitter3/4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + dev: true + + /execa/5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + dev: true + + /express/4.18.1: + resolution: {integrity: sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==} + engines: {node: '>= 0.10.0'} + dependencies: + accepts: 1.3.8 + array-flatten: 1.1.1 + body-parser: 1.20.0 + content-disposition: 0.5.4 + content-type: 1.0.4 + cookie: 0.5.0 + cookie-signature: 1.0.6 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 1.2.0 + fresh: 0.5.2 + http-errors: 2.0.0 + merge-descriptors: 1.0.1 + methods: 1.1.2 + 
on-finished: 2.4.1 + parseurl: 1.3.3 + path-to-regexp: 0.1.7 + proxy-addr: 2.0.7 + qs: 6.10.3 + range-parser: 1.2.1 + safe-buffer: 5.2.1 + send: 0.18.0 + serve-static: 1.15.0 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: 1.6.18 + utils-merge: 1.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /fast-deep-equal/3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + dev: true + + /fast-glob/3.2.11: + resolution: {integrity: sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==} + engines: {node: '>=8.6.0'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + + /fast-json-stable-stringify/2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + dev: true + + /fast-levenshtein/2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + dev: true + + /fastparse/1.1.2: + resolution: {integrity: sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==} + dev: false + + /fastq/1.13.0: + resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==} + dependencies: + reusify: 1.0.4 + + /filelist/1.0.4: + resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} + dependencies: + minimatch: 5.1.0 + dev: true + + /fill-range/7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} + dependencies: + to-regex-range: 5.0.1 + + /finalhandler/1.2.0: + resolution: {integrity: 
sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==} + engines: {node: '>= 0.8'} + dependencies: + debug: 2.6.9 + encodeurl: 1.0.2 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /find-up/5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + dev: true + + /form-data/4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: true + + /forwarded/0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + dev: true + + /fraction.js/4.2.0: + resolution: {integrity: sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==} + dev: false + + /fresh/0.5.2: + resolution: {integrity: sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=} + engines: {node: '>= 0.6'} + dev: true + + /fs-extra/10.1.0: + resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} + engines: {node: '>=12'} + dependencies: + graceful-fs: 4.2.10 + jsonfile: 6.1.0 + universalify: 2.0.0 + dev: true + + /fs-extra/9.1.0: + resolution: {integrity: sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==} + engines: {node: '>=10'} + dependencies: + at-least-node: 1.0.0 + graceful-fs: 4.2.10 + jsonfile: 6.1.0 + universalify: 2.0.0 + dev: true + + /fs.realpath/1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + dev: true 
+ + /fsevents/2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true + optional: true + + /function-bind/1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + + /function.prototype.name/1.1.5: + resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.1 + functions-have-names: 1.2.3 + dev: true + + /functions-have-names/1.2.3: + resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} + dev: true + + /gensync/1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + dev: true + + /get-caller-file/2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + dev: true + + /get-func-name/2.0.0: + resolution: {integrity: sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==} + dev: true + + /get-intrinsic/1.1.2: + resolution: {integrity: sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==} + dependencies: + function-bind: 1.1.1 + has: 1.0.3 + has-symbols: 1.0.3 + dev: true + + /get-own-enumerable-property-symbols/3.0.2: + resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} + dev: true + + /get-stream/6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + 
engines: {node: '>=10'} + dev: true + + /get-symbol-description/1.0.0: + resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.2 + dev: true + + /glob-parent/5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + dependencies: + is-glob: 4.0.3 + + /glob-parent/6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + dependencies: + is-glob: 4.0.3 + dev: false + + /glob/7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /globals/11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + dev: true + + /graceful-fs/4.2.10: + resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} + dev: true + + /handle-thing/2.0.1: + resolution: {integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==} + dev: true + + /has-bigints/1.0.2: + resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} + dev: true + + /has-flag/3.0.0: + resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} + engines: {node: '>=4'} + dev: true + + /has-flag/4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + 
engines: {node: '>=8'} + dev: true + + /has-property-descriptors/1.0.0: + resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} + dependencies: + get-intrinsic: 1.1.2 + dev: true + + /has-symbols/1.0.3: + resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + engines: {node: '>= 0.4'} + dev: true + + /has-tostringtag/1.0.0: + resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /has/1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} + dependencies: + function-bind: 1.1.1 + + /he/1.2.0: + resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==} + hasBin: true + dev: true + + /hpack.js/2.1.6: + resolution: {integrity: sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==} + dependencies: + inherits: 2.0.4 + obuf: 1.1.2 + readable-stream: 2.3.7 + wbuf: 1.7.3 + dev: true + + /html-encoding-sniffer/3.0.0: + resolution: {integrity: sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==} + engines: {node: '>=12'} + dependencies: + whatwg-encoding: 2.0.0 + dev: true + + /html-minifier/4.0.0: + resolution: {integrity: sha512-aoGxanpFPLg7MkIl/DDFYtb0iWz7jMFGqFhvEDZga6/4QTjneiD8I/NXL1x5aaoCp7FSIT6h/OhykDdPsbtMig==} + engines: {node: '>=6'} + hasBin: true + dependencies: + camel-case: 3.0.0 + clean-css: 4.2.4 + commander: 2.20.3 + he: 1.2.0 + param-case: 2.1.1 + relateurl: 0.2.7 + uglify-js: 3.16.2 + dev: true + + /html5parser/2.0.2: + resolution: {integrity: 
sha512-L0y+IdTVxHsovmye8MBtFgBvWZnq1C9WnI/SmJszxoQjmUH1psX2uzDk21O5k5et6udxdGjwxkbmT9eVRoG05w==} + dependencies: + tslib: 2.4.0 + dev: true + + /http-deceiver/1.2.7: + resolution: {integrity: sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==} + dev: true + + /http-errors/2.0.0: + resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} + engines: {node: '>= 0.8'} + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + dev: true + + /http-proxy-agent/5.0.0: + resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} + engines: {node: '>= 6'} + dependencies: + '@tootallnate/once': 2.0.0 + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /https-localhost/4.7.1: + resolution: {integrity: sha512-rl+NFV0l67/0W7fZwk4LB5gS6HdhtSFLpCpf1N+KD5WQAXtPXX1QE8H0cP8VNJii18rtpTkE9eAHdUfJ0goAnQ==} + hasBin: true + dependencies: + appdata-path: 1.0.0 + compression: 1.7.4 + cors: 2.8.5 + express: 4.18.1 + spdy: 4.0.2 + uglify-js: 3.16.2 + transitivePeerDependencies: + - supports-color + dev: true + + /https-proxy-agent/5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + dependencies: + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /human-signals/2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + dev: true + + /iconv-lite/0.4.24: + resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: true + + 
/iconv-lite/0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: true + + /idb/6.1.5: + resolution: {integrity: sha512-IJtugpKkiVXQn5Y+LteyBCNk1N8xpGV3wWZk9EVtZWH8DYkjBn0bX1XnGP9RkyZF0sAcywa6unHqSWKe7q4LGw==} + dev: true + + /inflight/1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + dev: true + + /inherits/2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + dev: true + + /internal-slot/1.0.3: + resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.1.2 + has: 1.0.3 + side-channel: 1.0.4 + dev: true + + /ipaddr.js/1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + dev: true + + /is-arrayish/0.3.2: + resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} + dev: false + + /is-bigint/1.0.4: + resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} + dependencies: + has-bigints: 1.0.2 + dev: true + + /is-binary-path/2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + dependencies: + binary-extensions: 2.2.0 + + /is-boolean-object/1.1.2: + resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: 
true + + /is-callable/1.2.4: + resolution: {integrity: sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==} + engines: {node: '>= 0.4'} + dev: true + + /is-core-module/2.9.0: + resolution: {integrity: sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==} + dependencies: + has: 1.0.3 + + /is-date-object/1.0.5: + resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-extglob/2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + /is-fullwidth-code-point/3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + dev: true + + /is-glob/4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + dependencies: + is-extglob: 2.1.1 + + /is-module/1.0.0: + resolution: {integrity: sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==} + dev: true + + /is-negative-zero/2.0.2: + resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} + engines: {node: '>= 0.4'} + dev: true + + /is-number-object/1.0.7: + resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-number/7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + /is-obj/1.0.1: + resolution: {integrity: 
sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==} + engines: {node: '>=0.10.0'} + dev: true + + /is-potential-custom-element-name/1.0.1: + resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} + dev: true + + /is-regex/1.1.4: + resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: true + + /is-regexp/1.0.0: + resolution: {integrity: sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==} + engines: {node: '>=0.10.0'} + dev: true + + /is-shared-array-buffer/1.0.2: + resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} + dependencies: + call-bind: 1.0.2 + dev: true + + /is-stream/2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + dev: true + + /is-string/1.0.7: + resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-symbol/1.0.4: + resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /is-weakref/1.0.2: + resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} + dependencies: + call-bind: 1.0.2 + dev: true + + /isarray/1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + dev: true + + /isexe/2.0.0: + resolution: {integrity: 
sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + dev: true + + /jake/10.8.5: + resolution: {integrity: sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==} + engines: {node: '>=10'} + hasBin: true + dependencies: + async: 3.2.4 + chalk: 4.1.2 + filelist: 1.0.4 + minimatch: 3.1.2 + dev: true + + /jest-worker/26.6.2: + resolution: {integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ==} + engines: {node: '>= 10.13.0'} + dependencies: + '@types/node': 18.0.4 + merge-stream: 2.0.0 + supports-color: 7.2.0 + dev: true + + /js-tokens/4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + dev: true + + /jsdom/20.0.0: + resolution: {integrity: sha512-x4a6CKCgx00uCmP+QakBDFXwjAJ69IkkIWHmtmjd3wvXPcdOS44hfX2vqkOQrVrq8l9DhNNADZRXaCEWvgXtVA==} + engines: {node: '>=14'} + peerDependencies: + canvas: ^2.5.0 + peerDependenciesMeta: + canvas: + optional: true + dependencies: + abab: 2.0.6 + acorn: 8.7.1 + acorn-globals: 6.0.0 + cssom: 0.5.0 + cssstyle: 2.3.0 + data-urls: 3.0.2 + decimal.js: 10.3.1 + domexception: 4.0.0 + escodegen: 2.0.0 + form-data: 4.0.0 + html-encoding-sniffer: 3.0.0 + http-proxy-agent: 5.0.0 + https-proxy-agent: 5.0.1 + is-potential-custom-element-name: 1.0.1 + nwsapi: 2.2.1 + parse5: 7.0.0 + saxes: 6.0.0 + symbol-tree: 3.2.4 + tough-cookie: 4.0.0 + w3c-hr-time: 1.0.2 + w3c-xmlserializer: 3.0.0 + webidl-conversions: 7.0.0 + whatwg-encoding: 2.0.0 + whatwg-mimetype: 3.0.0 + whatwg-url: 11.0.0 + ws: 8.8.0 + xml-name-validator: 4.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + dev: true + + /jsesc/0.5.0: + resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==} + hasBin: true + dev: true + + /jsesc/2.5.2: + resolution: {integrity: 
sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /json-schema-traverse/1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + dev: true + + /json-schema/0.4.0: + resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} + dev: true + + /json5/2.2.1: + resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} + engines: {node: '>=6'} + hasBin: true + dev: true + + /jsonc-eslint-parser/1.4.1: + resolution: {integrity: sha512-hXBrvsR1rdjmB2kQmUjf1rEIa+TqHBGMge8pwi++C+Si1ad7EjZrJcpgwym+QGK/pqTx+K7keFAtLlVNdLRJOg==} + engines: {node: '>=8.10.0'} + dependencies: + acorn: 7.4.1 + eslint-utils: 2.1.0 + eslint-visitor-keys: 1.3.0 + espree: 6.2.1 + semver: 6.3.0 + dev: true + + /jsonc-parser/3.1.0: + resolution: {integrity: sha512-DRf0QjnNeCUds3xTjKlQQ3DpJD51GvDjJfnxUVWg6PZTo2otSm+slzNAxU/35hF8/oJIKoG9slq30JYOsF2azg==} + dev: true + + /jsonfile/6.1.0: + resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + dependencies: + universalify: 2.0.0 + optionalDependencies: + graceful-fs: 4.2.10 + dev: true + + /jsonpointer/5.0.1: + resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} + engines: {node: '>=0.10.0'} + dev: true + + /kolorist/1.5.1: + resolution: {integrity: sha512-lxpCM3HTvquGxKGzHeknB/sUjuVoUElLlfYnXZT73K8geR9jQbroGlSCFBax9/0mpGoD3kzcMLnOlGQPJJNyqQ==} + dev: true + + /leven/3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + dev: true + + /levn/0.3.0: + resolution: {integrity: 
sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==} + engines: {node: '>= 0.8.0'} + dependencies: + prelude-ls: 1.1.2 + type-check: 0.3.2 + dev: true + + /lilconfig/2.0.6: + resolution: {integrity: sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==} + engines: {node: '>=10'} + dev: false + + /local-pkg/0.4.2: + resolution: {integrity: sha512-mlERgSPrbxU3BP4qBqAvvwlgW4MTg78iwJdGGnv7kibKjWcJksrG3t6LB5lXI93wXRDvG4NpUgJFmTG4T6rdrg==} + engines: {node: '>=14'} + dev: true + + /locate-path/6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + dependencies: + p-locate: 5.0.0 + dev: true + + /lodash.castarray/4.4.0: + resolution: {integrity: sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==} + dev: false + + /lodash.debounce/4.0.8: + resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + dev: true + + /lodash.isplainobject/4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + dev: false + + /lodash.merge/4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + dev: false + + /lodash.sortby/4.7.0: + resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} + dev: true + + /lodash/4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + dev: true + + /loupe/2.3.4: + resolution: {integrity: sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ==} + dependencies: + get-func-name: 2.0.0 + dev: true + + /lower-case/1.1.4: + resolution: {integrity: 
sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA==} + dev: true + + /magic-string/0.25.9: + resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} + dependencies: + sourcemap-codec: 1.4.8 + + /magic-string/0.26.2: + resolution: {integrity: sha512-NzzlXpclt5zAbmo6h6jNc8zl2gNRGHvmsZW4IvZhTC4W7k4OlLP+S5YLussa/r3ixNT66KOQfNORlXHSOy/X4A==} + engines: {node: '>=12'} + dependencies: + sourcemap-codec: 1.4.8 + dev: true + + /media-typer/0.3.0: + resolution: {integrity: sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=} + engines: {node: '>= 0.6'} + dev: true + + /merge-descriptors/1.0.1: + resolution: {integrity: sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=} + dev: true + + /merge-stream/2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + dev: true + + /merge2/1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + /methods/1.1.2: + resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} + engines: {node: '>= 0.6'} + dev: true + + /micromatch/4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + + /mime-db/1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: true + + /mime-types/2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: true + + /mime/1.6.0: + resolution: {integrity: 
sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /mimic-fn/2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + dev: true + + /mini-svg-data-uri/1.4.4: + resolution: {integrity: sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==} + hasBin: true + dev: false + + /minimalistic-assert/1.0.1: + resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==} + dev: true + + /minimatch/3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + dev: true + + /minimatch/5.1.0: + resolution: {integrity: sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==} + engines: {node: '>=10'} + dependencies: + brace-expansion: 2.0.1 + dev: true + + /minimist/1.2.6: + resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==} + dev: false + + /mlly/0.5.4: + resolution: {integrity: sha512-gFlsLWCjVwu/LM/ZfYUkmnbBoz7eyBIMUwVQYDqhd8IvtNFDeZ95uwAyxHE2Xx7tQwePQaCo4fECZ9MWFEUTgQ==} + dependencies: + pathe: 0.3.2 + pkg-types: 0.3.3 + dev: true + + /ms/2.0.0: + resolution: {integrity: sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=} + dev: true + + /ms/2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true + + /ms/2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + dev: true + + /nanoid/3.3.4: + resolution: {integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==} + 
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + /negotiator/0.6.3: + resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} + engines: {node: '>= 0.6'} + dev: true + + /no-case/2.3.2: + resolution: {integrity: sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==} + dependencies: + lower-case: 1.1.4 + dev: true + + /node-releases/2.0.6: + resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} + + /normalize-path/3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + /normalize-range/0.1.2: + resolution: {integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==} + engines: {node: '>=0.10.0'} + dev: false + + /npm-run-path/4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + dependencies: + path-key: 3.1.1 + dev: true + + /nth-check/2.1.1: + resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + dependencies: + boolbase: 1.0.0 + dev: true + + /nwsapi/2.2.1: + resolution: {integrity: sha512-JYOWTeFoS0Z93587vRJgASD5Ut11fYl5NyihP3KrYBvMe1FRRs6RN7m20SA/16GM4P6hTnZjT+UmDOt38UeXNg==} + dev: true + + /object-assign/4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + dev: true + + /object-hash/3.0.0: + resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} + engines: {node: '>= 6'} + dev: false + + /object-inspect/1.12.2: + resolution: {integrity: 
sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==} + dev: true + + /object-keys/1.1.1: + resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} + engines: {node: '>= 0.4'} + dev: true + + /object.assign/4.1.2: + resolution: {integrity: sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + has-symbols: 1.0.3 + object-keys: 1.1.1 + dev: true + + /obuf/1.1.2: + resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + dev: true + + /on-finished/2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + dependencies: + ee-first: 1.1.1 + dev: true + + /on-headers/1.0.2: + resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==} + engines: {node: '>= 0.8'} + dev: true + + /once/1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + dependencies: + wrappy: 1.0.2 + dev: true + + /onetime/5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + dependencies: + mimic-fn: 2.1.0 + dev: true + + /optionator/0.8.3: + resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==} + engines: {node: '>= 0.8.0'} + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.3.0 + prelude-ls: 1.1.2 + type-check: 0.3.2 + word-wrap: 1.2.3 + dev: true + + /p-finally/1.0.0: + resolution: {integrity: 
sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} + engines: {node: '>=4'} + dev: true + + /p-limit/3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + dependencies: + yocto-queue: 0.1.0 + dev: true + + /p-locate/5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + dependencies: + p-limit: 3.1.0 + dev: true + + /p-queue/6.6.2: + resolution: {integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} + engines: {node: '>=8'} + dependencies: + eventemitter3: 4.0.7 + p-timeout: 3.2.0 + dev: true + + /p-timeout/3.2.0: + resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==} + engines: {node: '>=8'} + dependencies: + p-finally: 1.0.0 + dev: true + + /param-case/2.1.1: + resolution: {integrity: sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w==} + dependencies: + no-case: 2.3.2 + dev: true + + /parse5-htmlparser2-tree-adapter/6.0.1: + resolution: {integrity: sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==} + dependencies: + parse5: 6.0.1 + dev: true + + /parse5/6.0.1: + resolution: {integrity: sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==} + dev: true + + /parse5/7.0.0: + resolution: {integrity: sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==} + dependencies: + entities: 4.3.1 + dev: true + + /parseurl/1.3.3: + resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + dev: true + + /path-exists/4.0.0: + resolution: {integrity: 
sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + dev: true + + /path-is-absolute/1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + dev: true + + /path-key/3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + dev: true + + /path-parse/1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + /path-to-regexp/0.1.7: + resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} + dev: true + + /pathe/0.3.2: + resolution: {integrity: sha512-qhnmX0TOqlCvdWWTkoM83wh5J8fZ2yhbDEc9MlsnAEtEc+JCwxUKEwmd6pkY9hRe6JR1Uecbc14VcAKX2yFSTA==} + dev: true + + /pathval/1.1.1: + resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==} + dev: true + + /picocolors/1.0.0: + resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} + + /picomatch/2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + /pify/2.3.0: + resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==} + engines: {node: '>=0.10.0'} + dev: false + + /pinia/2.0.16_j6bzmzd4ujpabbp5objtwxyjp4: + resolution: {integrity: sha512-9/LMVO+/epny1NBfC77vnps4g3JRezxhhoF1xLUk8mZkUIxVnwfEAIRiAX8mYBTD/KCwZqnDMqXc8w3eU0FQGg==} + peerDependencies: + '@vue/composition-api': ^1.4.0 + typescript: '>=4.4.4' + vue: ^2.6.14 || ^3.2.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + typescript: + optional: true + 
dependencies: + '@vue/devtools-api': 6.2.1 + typescript: 4.7.4 + vue: 3.2.37 + vue-demi: 0.13.4_vue@3.2.37 + dev: false + + /pkg-types/0.3.3: + resolution: {integrity: sha512-6AJcCMnjUQPQv/Wk960w0TOmjhdjbeaQJoSKWRQv9N3rgkessCu6J0Ydsog/nw1MbpnxHuPzYbfOn2KmlZO1FA==} + dependencies: + jsonc-parser: 3.1.0 + mlly: 0.5.4 + pathe: 0.3.2 + dev: true + + /postcss-import/14.1.0_postcss@8.4.14: + resolution: {integrity: sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw==} + engines: {node: '>=10.0.0'} + peerDependencies: + postcss: ^8.0.0 + dependencies: + postcss: 8.4.14 + postcss-value-parser: 4.2.0 + read-cache: 1.0.0 + resolve: 1.22.1 + dev: false + + /postcss-js/4.0.0_postcss@8.4.14: + resolution: {integrity: sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ==} + engines: {node: ^12 || ^14 || >= 16} + peerDependencies: + postcss: ^8.3.3 + dependencies: + camelcase-css: 2.0.1 + postcss: 8.4.14 + dev: false + + /postcss-load-config/3.1.4_postcss@8.4.14: + resolution: {integrity: sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==} + engines: {node: '>= 10'} + peerDependencies: + postcss: '>=8.0.9' + ts-node: '>=9.0.0' + peerDependenciesMeta: + postcss: + optional: true + ts-node: + optional: true + dependencies: + lilconfig: 2.0.6 + postcss: 8.4.14 + yaml: 1.10.2 + dev: false + + /postcss-nested/5.0.6_postcss@8.4.14: + resolution: {integrity: sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA==} + engines: {node: '>=12.0'} + peerDependencies: + postcss: ^8.2.14 + dependencies: + postcss: 8.4.14 + postcss-selector-parser: 6.0.10 + dev: false + + /postcss-selector-parser/6.0.10: + resolution: {integrity: sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==} + engines: {node: '>=4'} + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + dev: false + + 
/postcss-value-parser/4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + dev: false + + /postcss/8.4.14: + resolution: {integrity: sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==} + engines: {node: ^10 || ^12 || >=14} + dependencies: + nanoid: 3.3.4 + picocolors: 1.0.0 + source-map-js: 1.0.2 + + /prelude-ls/1.1.2: + resolution: {integrity: sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==} + engines: {node: '>= 0.8.0'} + dev: true + + /prettier/2.7.1: + resolution: {integrity: sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==} + engines: {node: '>=10.13.0'} + hasBin: true + dev: true + + /pretty-bytes/5.6.0: + resolution: {integrity: sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==} + engines: {node: '>=6'} + dev: true + + /pretty-bytes/6.0.0: + resolution: {integrity: sha512-6UqkYefdogmzqAZWzJ7laYeJnaXDy2/J+ZqiiMtS7t7OfpXWTlaeGMwX8U6EFvPV/YWWEKRkS8hKS4k60WHTOg==} + engines: {node: ^14.13.1 || >=16.0.0} + dev: true + + /process-nextick-args/2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + dev: true + + /proxy-addr/2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + dev: true + + /psl/1.9.0: + resolution: {integrity: sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==} + dev: true + + /punycode/2.1.1: + resolution: {integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==} + engines: {node: '>=6'} + dev: true + + /qs/6.10.3: + resolution: {integrity: 
sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==} + engines: {node: '>=0.6'} + dependencies: + side-channel: 1.0.4 + dev: true + + /queue-microtask/1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + /quick-lru/5.1.1: + resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} + engines: {node: '>=10'} + dev: false + + /randombytes/2.1.0: + resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /range-parser/1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + dev: true + + /raw-body/2.5.1: + resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==} + engines: {node: '>= 0.8'} + dependencies: + bytes: 3.1.2 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + unpipe: 1.0.0 + dev: true + + /read-cache/1.0.0: + resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==} + dependencies: + pify: 2.3.0 + dev: false + + /readable-stream/2.3.7: + resolution: {integrity: sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==} + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + dev: true + + /readable-stream/3.6.0: + resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==} + engines: {node: '>= 6'} + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + dev: true + + /readdirp/3.6.0: + resolution: 
{integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + dependencies: + picomatch: 2.3.1 + + /regenerate-unicode-properties/10.0.1: + resolution: {integrity: sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==} + engines: {node: '>=4'} + dependencies: + regenerate: 1.4.2 + dev: true + + /regenerate/1.4.2: + resolution: {integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==} + dev: true + + /regenerator-runtime/0.11.1: + resolution: {integrity: sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==} + dev: false + + /regenerator-runtime/0.13.9: + resolution: {integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==} + dev: true + + /regenerator-transform/0.15.0: + resolution: {integrity: sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==} + dependencies: + '@babel/runtime': 7.18.6 + dev: true + + /regexp.prototype.flags/1.4.3: + resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + functions-have-names: 1.2.3 + dev: true + + /regexpu-core/5.1.0: + resolution: {integrity: sha512-bb6hk+xWd2PEOkj5It46A16zFMs2mv86Iwpdu94la4S3sJ7C973h2dHpYKwIBGaWSO7cIRJ+UX0IeMaWcO4qwA==} + engines: {node: '>=4'} + dependencies: + regenerate: 1.4.2 + regenerate-unicode-properties: 10.0.1 + regjsgen: 0.6.0 + regjsparser: 0.8.4 + unicode-match-property-ecmascript: 2.0.0 + unicode-match-property-value-ecmascript: 2.0.0 + dev: true + + /regjsgen/0.6.0: + resolution: {integrity: sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==} + dev: true + + /regjsparser/0.8.4: + resolution: 
{integrity: sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==} + hasBin: true + dependencies: + jsesc: 0.5.0 + dev: true + + /relateurl/0.2.7: + resolution: {integrity: sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==} + engines: {node: '>= 0.10'} + dev: true + + /require-directory/2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + dev: true + + /require-from-string/2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + dev: true + + /resolve/1.22.1: + resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} + hasBin: true + dependencies: + is-core-module: 2.9.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + /reusify/1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + /rollup-plugin-terser/7.0.2_rollup@2.77.0: + resolution: {integrity: sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ==} + peerDependencies: + rollup: ^2.0.0 + dependencies: + '@babel/code-frame': 7.18.6 + jest-worker: 26.6.2 + rollup: 2.77.0 + serialize-javascript: 4.0.0 + terser: 5.14.2 + dev: true + + /rollup/2.77.0: + resolution: {integrity: sha512-vL8xjY4yOQEw79DvyXLijhnhh+R/O9zpF/LEgkCebZFtb6ELeN9H3/2T0r8+mp+fFTBHZ5qGpOpW2ela2zRt3g==} + engines: {node: '>=10.0.0'} + hasBin: true + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /run-parallel/1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + dependencies: + queue-microtask: 1.2.3 + + 
/safe-buffer/5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + dev: true + + /safe-buffer/5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + dev: true + + /safer-buffer/2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + dev: true + + /saxes/6.0.0: + resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==} + engines: {node: '>=v12.22.7'} + dependencies: + xmlchars: 2.2.0 + dev: true + + /scule/0.2.1: + resolution: {integrity: sha512-M9gnWtn3J0W+UhJOHmBxBTwv8mZCan5i1Himp60t6vvZcor0wr+IM0URKmIglsWJ7bRujNAVVN77fp+uZaWoKg==} + dev: true + + /select-hose/2.0.0: + resolution: {integrity: sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==} + dev: true + + /semver/6.3.0: + resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} + hasBin: true + dev: true + + /semver/7.0.0: + resolution: {integrity: sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==} + hasBin: true + dev: true + + /send/0.18.0: + resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==} + engines: {node: '>= 0.8.0'} + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + dev: true + + /serialize-javascript/4.0.0: + resolution: {integrity: sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==} + dependencies: + 
randombytes: 2.1.0 + dev: true + + /serve-static/1.15.0: + resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==} + engines: {node: '>= 0.8.0'} + dependencies: + encodeurl: 1.0.2 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 0.18.0 + transitivePeerDependencies: + - supports-color + dev: true + + /setprototypeof/1.2.0: + resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + dev: true + + /shebang-command/2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + dependencies: + shebang-regex: 3.0.0 + dev: true + + /shebang-regex/3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + dev: true + + /side-channel/1.0.4: + resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.2 + object-inspect: 1.12.2 + dev: true + + /signal-exit/3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + dev: true + + /simple-swizzle/0.2.2: + resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} + dependencies: + is-arrayish: 0.3.2 + dev: false + + /source-map-js/1.0.2: + resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} + engines: {node: '>=0.10.0'} + + /source-map-support/0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + dev: true + + /source-map/0.6.1: + resolution: {integrity: 
sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + /source-map/0.8.0-beta.0: + resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} + engines: {node: '>= 8'} + dependencies: + whatwg-url: 7.1.0 + dev: true + + /sourcemap-codec/1.4.8: + resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} + + /spdy-transport/3.0.0: + resolution: {integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==} + dependencies: + debug: 4.3.4 + detect-node: 2.1.0 + hpack.js: 2.1.6 + obuf: 1.1.2 + readable-stream: 3.6.0 + wbuf: 1.7.3 + transitivePeerDependencies: + - supports-color + dev: true + + /spdy/4.0.2: + resolution: {integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==} + engines: {node: '>=6.0.0'} + dependencies: + debug: 4.3.4 + handle-thing: 2.0.1 + http-deceiver: 1.2.7 + select-hose: 2.0.0 + spdy-transport: 3.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /statuses/2.0.1: + resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} + engines: {node: '>= 0.8'} + dev: true + + /string-width/4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + dev: true + + /string.prototype.matchall/4.0.7: + resolution: {integrity: sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.1 + get-intrinsic: 1.1.2 + has-symbols: 1.0.3 + internal-slot: 1.0.3 + regexp.prototype.flags: 1.4.3 + 
side-channel: 1.0.4 + dev: true + + /string.prototype.trimend/1.0.5: + resolution: {integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.1 + dev: true + + /string.prototype.trimstart/1.0.5: + resolution: {integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.1 + dev: true + + /string_decoder/1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + dependencies: + safe-buffer: 5.1.2 + dev: true + + /string_decoder/1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /stringify-object/3.3.0: + resolution: {integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==} + engines: {node: '>=4'} + dependencies: + get-own-enumerable-property-symbols: 3.0.2 + is-obj: 1.0.1 + is-regexp: 1.0.0 + dev: true + + /strip-ansi/6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + dependencies: + ansi-regex: 5.0.1 + dev: true + + /strip-comments/2.0.1: + resolution: {integrity: sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw==} + engines: {node: '>=10'} + dev: true + + /strip-final-newline/2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + dev: true + + /strip-literal/0.4.0: + resolution: {integrity: sha512-ql/sBDoJOybTKSIOWrrh8kgUEMjXMwRAkZTD0EwiwxQH/6tTPkZvMIEjp0CRlpi6V5FMiJyvxeRkEi1KrGISoA==} + dependencies: + 
acorn: 8.7.1 + dev: true + + /supports-color/5.5.0: + resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} + engines: {node: '>=4'} + dependencies: + has-flag: 3.0.0 + dev: true + + /supports-color/7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + dependencies: + has-flag: 4.0.0 + dev: true + + /supports-preserve-symlinks-flag/1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + /symbol-tree/3.2.4: + resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} + dev: true + + /tailwindcss/3.1.6_postcss@8.4.14: + resolution: {integrity: sha512-7skAOY56erZAFQssT1xkpk+kWt2NrO45kORlxFPXUt3CiGsVPhH1smuH5XoDH6sGPXLyBv+zgCKA2HWBsgCytg==} + engines: {node: '>=12.13.0'} + hasBin: true + peerDependencies: + postcss: ^8.0.9 + dependencies: + arg: 5.0.2 + chokidar: 3.5.3 + color-name: 1.1.4 + detective: 5.2.1 + didyoumean: 1.2.2 + dlv: 1.1.3 + fast-glob: 3.2.11 + glob-parent: 6.0.2 + is-glob: 4.0.3 + lilconfig: 2.0.6 + normalize-path: 3.0.0 + object-hash: 3.0.0 + picocolors: 1.0.0 + postcss: 8.4.14 + postcss-import: 14.1.0_postcss@8.4.14 + postcss-js: 4.0.0_postcss@8.4.14 + postcss-load-config: 3.1.4_postcss@8.4.14 + postcss-nested: 5.0.6_postcss@8.4.14 + postcss-selector-parser: 6.0.10 + postcss-value-parser: 4.2.0 + quick-lru: 5.1.1 + resolve: 1.22.1 + transitivePeerDependencies: + - ts-node + dev: false + + /temp-dir/2.0.0: + resolution: {integrity: sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==} + engines: {node: '>=8'} + dev: true + + /tempy/0.6.0: + resolution: {integrity: sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw==} + engines: {node: 
'>=10'} + dependencies: + is-stream: 2.0.1 + temp-dir: 2.0.0 + type-fest: 0.16.0 + unique-string: 2.0.0 + dev: true + + /terser/5.14.2: + resolution: {integrity: sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==} + engines: {node: '>=10'} + hasBin: true + dependencies: + '@jridgewell/source-map': 0.3.2 + acorn: 8.7.1 + commander: 2.20.3 + source-map-support: 0.5.21 + dev: true + + /tinypool/0.2.3: + resolution: {integrity: sha512-BNbzsKIUzn6HlvwOJkRpl3ykim3PHHZWcfLX7dDisio8C+mXbjikKD7c8XmOBmZEKz7dME45ikTCfCMFvUf3zw==} + engines: {node: '>=14.0.0'} + dev: true + + /tinyspy/0.3.3: + resolution: {integrity: sha512-gRiUR8fuhUf0W9lzojPf1N1euJYA30ISebSfgca8z76FOvXtVXqd5ojEIaKLWbDQhAaC3ibxZIjqbyi4ybjcTw==} + engines: {node: '>=14.0.0'} + dev: true + + /to-fast-properties/2.0.0: + resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + + /to-regex-range/5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + dependencies: + is-number: 7.0.0 + + /toidentifier/1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + dev: true + + /tough-cookie/4.0.0: + resolution: {integrity: sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==} + engines: {node: '>=6'} + dependencies: + psl: 1.9.0 + punycode: 2.1.1 + universalify: 0.1.2 + dev: true + + /tr46/1.0.1: + resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} + dependencies: + punycode: 2.1.1 + dev: true + + /tr46/3.0.0: + resolution: {integrity: sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==} + engines: {node: '>=12'} + dependencies: + 
punycode: 2.1.1 + dev: true + + /tslib/2.4.0: + resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==} + dev: true + + /type-check/0.3.2: + resolution: {integrity: sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==} + engines: {node: '>= 0.8.0'} + dependencies: + prelude-ls: 1.1.2 + dev: true + + /type-detect/4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + dev: true + + /type-fest/0.16.0: + resolution: {integrity: sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==} + engines: {node: '>=10'} + dev: true + + /type-is/1.6.18: + resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} + engines: {node: '>= 0.6'} + dependencies: + media-typer: 0.3.0 + mime-types: 2.1.35 + dev: true + + /typescript/4.7.4: + resolution: {integrity: sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==} + engines: {node: '>=4.2.0'} + hasBin: true + + /uglify-js/3.16.2: + resolution: {integrity: sha512-AaQNokTNgExWrkEYA24BTNMSjyqEXPSfhqoS0AxmHkCJ4U+Dyy5AvbGV/sqxuxficEfGGoX3zWw9R7QpLFfEsg==} + engines: {node: '>=0.8.0'} + hasBin: true + dev: true + + /unbox-primitive/1.0.2: + resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} + dependencies: + call-bind: 1.0.2 + has-bigints: 1.0.2 + has-symbols: 1.0.3 + which-boxed-primitive: 1.0.2 + dev: true + + /unicode-canonical-property-names-ecmascript/2.0.0: + resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} + engines: {node: '>=4'} + dev: true + + /unicode-match-property-ecmascript/2.0.0: + resolution: {integrity: 
sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==} + engines: {node: '>=4'} + dependencies: + unicode-canonical-property-names-ecmascript: 2.0.0 + unicode-property-aliases-ecmascript: 2.0.0 + dev: true + + /unicode-match-property-value-ecmascript/2.0.0: + resolution: {integrity: sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==} + engines: {node: '>=4'} + dev: true + + /unicode-property-aliases-ecmascript/2.0.0: + resolution: {integrity: sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==} + engines: {node: '>=4'} + dev: true + + /unimport/0.4.5_vite@3.0.0: + resolution: {integrity: sha512-DnmiSt/HQIfhdcxOy4CGqwZDBh3WHg33euX1ge4X8hvquKBmw2PFvhoAJaBKxscOz0oYosoPoPT4tkDZWHhV0Q==} + dependencies: + '@rollup/pluginutils': 4.2.1 + escape-string-regexp: 5.0.0 + fast-glob: 3.2.11 + local-pkg: 0.4.2 + magic-string: 0.26.2 + mlly: 0.5.4 + pathe: 0.3.2 + scule: 0.2.1 + strip-literal: 0.4.0 + unplugin: 0.7.2_vite@3.0.0 + transitivePeerDependencies: + - esbuild + - rollup + - vite + - webpack + dev: true + + /unique-string/2.0.0: + resolution: {integrity: sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==} + engines: {node: '>=8'} + dependencies: + crypto-random-string: 2.0.0 + dev: true + + /universalify/0.1.2: + resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} + engines: {node: '>= 4.0.0'} + dev: true + + /universalify/2.0.0: + resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} + engines: {node: '>= 10.0.0'} + dev: true + + /unpipe/1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + dev: true + + /unplugin-auto-import/0.9.3_gvio5bgcjg37ethveel5rvqgym: 
+ resolution: {integrity: sha512-S3fC/kp98v+HhELCCG4jm4fhd/BbXhhcmFxxQ/JHXefLPtz9WTCOsSq3pq7U4D94xJ0eyZOPo/56Y9iUf3kskw==} + engines: {node: '>=14'} + peerDependencies: + '@vueuse/core': '*' + peerDependenciesMeta: + '@vueuse/core': + optional: true + dependencies: + '@antfu/utils': 0.5.2 + '@rollup/pluginutils': 4.2.1 + '@vueuse/core': 8.9.3_vue@3.2.37 + local-pkg: 0.4.2 + magic-string: 0.26.2 + unimport: 0.4.5_vite@3.0.0 + unplugin: 0.7.2_vite@3.0.0 + transitivePeerDependencies: + - esbuild + - rollup + - vite + - webpack + dev: true + + /unplugin-icons/0.14.7_5vhdwjrvx3yqj3k5avrlxtwoii: + resolution: {integrity: sha512-TrNnEdpaXMdiG5BsCgvU6cv/gSLYvIk1f8wGCGZmOo4wmi3nqYBuqIEuiXhmmyXdDZuRRpCaOzCnCYYZ5H7U8g==} + peerDependencies: + '@svgr/core': '>=5.5.0' + '@vue/compiler-sfc': ^3.0.2 + vue-template-compiler: ^2.6.12 + vue-template-es2015-compiler: ^1.9.0 + peerDependenciesMeta: + '@svgr/core': + optional: true + '@vue/compiler-sfc': + optional: true + vue-template-compiler: + optional: true + vue-template-es2015-compiler: + optional: true + dependencies: + '@antfu/install-pkg': 0.1.0 + '@antfu/utils': 0.5.2 + '@iconify/utils': 1.0.33 + '@vue/compiler-sfc': 3.2.37 + debug: 4.3.4 + kolorist: 1.5.1 + local-pkg: 0.4.2 + unplugin: 0.7.2_vite@3.0.0 + transitivePeerDependencies: + - esbuild + - rollup + - supports-color + - vite + - webpack + dev: true + + /unplugin-vue-components/0.21.1_vite@3.0.0+vue@3.2.37: + resolution: {integrity: sha512-8MhIT323q1EUu7rz6NfQeiHqDrZKtygy6s9jzcQAuuZUM2T38SHlPT5YJjBOZmM0Bau6YuNTKfBBX4iHzeusaQ==} + engines: {node: '>=14'} + peerDependencies: + '@babel/parser': ^7.15.8 + vue: 2 || 3 + peerDependenciesMeta: + '@babel/parser': + optional: true + dependencies: + '@antfu/utils': 0.5.2 + '@rollup/pluginutils': 4.2.1 + chokidar: 3.5.3 + debug: 4.3.4 + fast-glob: 3.2.11 + local-pkg: 0.4.2 + magic-string: 0.26.2 + minimatch: 5.1.0 + resolve: 1.22.1 + unplugin: 0.7.2_vite@3.0.0 + vue: 3.2.37 + transitivePeerDependencies: + - esbuild + - rollup + 
- supports-color + - vite + - webpack + dev: true + + /unplugin-vue-router/0.0.21_6ef32vilt6ae74xg3uetebg7ja: + resolution: {integrity: sha512-gOK8/pQQMug7kVFn2logNs6ll5Fj9Nv/OSwrG07CQei7WcZ8TYDb1hy598+4INRNT6KAyUFc6OInEdDZlN+FVg==} + peerDependencies: + vue-router: ^4.1.0 + peerDependenciesMeta: + vue-router: + optional: true + dependencies: + chokidar: 3.5.3 + fast-glob: 3.2.11 + json5: 2.2.1 + local-pkg: 0.4.2 + pathe: 0.3.2 + scule: 0.2.1 + unplugin: 0.7.2_vite@3.0.0 + vue-router: 4.1.2_vue@3.2.37 + yaml: 2.1.1 + transitivePeerDependencies: + - esbuild + - rollup + - vite + - webpack + dev: true + + /unplugin/0.7.2_vite@3.0.0: + resolution: {integrity: sha512-m7thX4jP8l5sETpLdUASoDOGOcHaOVtgNyrYlToyQUvILUtEzEnngRBrHnAX3IKqooJVmXpoa/CwQ/QqzvGaHQ==} + peerDependencies: + esbuild: '>=0.13' + rollup: ^2.50.0 + vite: ^2.3.0 || ^3.0.0-0 + webpack: 4 || 5 + peerDependenciesMeta: + esbuild: + optional: true + rollup: + optional: true + vite: + optional: true + webpack: + optional: true + dependencies: + acorn: 8.7.1 + chokidar: 3.5.3 + vite: 3.0.0 + webpack-sources: 3.2.3 + webpack-virtual-modules: 0.4.4 + dev: true + + /upath/1.2.0: + resolution: {integrity: sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==} + engines: {node: '>=4'} + dev: true + + /update-browserslist-db/1.0.4_browserslist@4.21.2: + resolution: {integrity: sha512-jnmO2BEGUjsMOe/Fg9u0oczOe/ppIDZPebzccl1yDWGLFP16Pa1/RM5wEoKYPG2zstNcDuAStejyxsOuKINdGA==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + dependencies: + browserslist: 4.21.2 + escalade: 3.1.1 + picocolors: 1.0.0 + + /upper-case/1.1.3: + resolution: {integrity: sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA==} + dev: true + + /uri-js/4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + dependencies: + punycode: 2.1.1 + dev: true + + /util-deprecate/1.0.2: 
+ resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + /utils-merge/1.0.1: + resolution: {integrity: sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=} + engines: {node: '>= 0.4.0'} + dev: true + + /vary/1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + dev: true + + /vite-plugin-pwa/0.12.3_2slanrkxy3rtt36tq3uv5vcihy: + resolution: {integrity: sha512-gmYdIVXpmBuNjzbJFPZFzxWYrX4lHqwMAlOtjmXBbxApiHjx9QPXKQPJjSpeTeosLKvVbNcKSAAhfxMda0QVNQ==} + peerDependencies: + vite: ^2.0.0 || ^3.0.0-0 + workbox-window: ^6.4.0 + dependencies: + debug: 4.3.4 + fast-glob: 3.2.11 + pretty-bytes: 6.0.0 + rollup: 2.77.0 + vite: 3.0.0 + workbox-build: 6.5.3 + workbox-window: 6.5.3 + transitivePeerDependencies: + - '@types/babel__core' + - supports-color + dev: true + + /vite-plugin-vue-layouts/0.7.0_5vcanmustn365rg452uyprxay4: + resolution: {integrity: sha512-k5XDmRNFo4M/GmUjhbRXj2WmJiFcGoVI8l/uZ72RHyRDQr4wE/6Zq/KFq0lqXomWQxTSzakQRUswzNwtvZLE8A==} + peerDependencies: + vite: ^2.5.0 || ^3.0.0-0 + vue: ^2.6.12 || ^3.2.4 + vue-router: ^3.5.1 || ^ 4.0.11 + dependencies: + '@vue/compiler-sfc': 3.2.37 + debug: 4.3.4 + fast-glob: 3.2.11 + vite: 3.0.0 + vue: 3.2.37 + vue-router: 4.1.2_vue@3.2.37 + transitivePeerDependencies: + - supports-color + dev: true + + /vite-plugin-vue-type-imports/0.2.0_2yymnzrok6eda47acnj2yjm3ae: + resolution: {integrity: sha512-6rukBj1MP27WfsY5gqMZJg/Z8whK7f9oLbpICBDwzwOK2Oj7KkbP7IgFc2p3qInuB51SvUuxIIcJQVoUPaW6Nw==} + peerDependencies: + '@vue/compiler-sfc': ^3.2.24 + vue: ^3.2.24 + dependencies: + '@vue/compiler-sfc': 3.2.37 + vue: 3.2.37 + dev: true + + /vite-ssg-sitemap/0.3.2: + resolution: {integrity: sha512-udsHc11VYQLSy/Zy6efKHnU/ABozGUz84Z9GqmrD8MaB2HCezqFVFAJukwZaxBzcEjy1jFXfNCIcT8J6LefpWw==} + dev: true + + /vite-ssg/0.20.2_zodgg63wx4ia7g264h7fi3aaae: + resolution: {integrity: 
sha512-XtBgJEDrN9cL6pgzXvThrTS7Pm5zwZZnBt50EmL/O+QbdhD/ZjCbcp4GmIrWFBWjVBFcgfwgPccmWv3DAF6s/w==} + engines: {node: '>=14.0.0'} + hasBin: true + peerDependencies: + '@vueuse/head': ^0.5.0 || ^0.6.0 || ^0.7.0 + critters: ^0.0.16 + vite: ^2.0.0 || ^3.0.0-0 + vue: ^3.2.10 + vue-router: ^4.0.1 + peerDependenciesMeta: + critters: + optional: true + dependencies: + '@vueuse/head': 0.7.6_vue@3.2.37 + critters: 0.0.16 + fs-extra: 10.1.0 + html-minifier: 4.0.0 + html5parser: 2.0.2 + jsdom: 20.0.0 + kolorist: 1.5.1 + p-queue: 6.6.2 + prettier: 2.7.1 + vite: 3.0.0 + vue: 3.2.37 + vue-router: 4.1.2_vue@3.2.37 + yargs: 17.5.1 + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - utf-8-validate + dev: true + + /vite/3.0.0: + resolution: {integrity: sha512-M7phQhY3+fRZa0H+1WzI6N+/onruwPTBTMvaj7TzgZ0v2TE+N2sdLKxJOfOv9CckDWt5C4HmyQP81xB4dwRKzA==} + engines: {node: '>=14.18.0'} + hasBin: true + peerDependencies: + less: '*' + sass: '*' + stylus: '*' + terser: ^5.4.0 + peerDependenciesMeta: + less: + optional: true + sass: + optional: true + stylus: + optional: true + terser: + optional: true + dependencies: + esbuild: 0.14.49 + postcss: 8.4.14 + resolve: 1.22.1 + rollup: 2.77.0 + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /vitest/0.18.0: + resolution: {integrity: sha512-ryAtlh5Gvg3+aLNuOQ8YOHxgQCCu46jx40X5MBL0K0/ejB9i5zsr8fV8LTGXbXex80UMHlzceI9F+ouGaiR+mQ==} + engines: {node: '>=v14.16.0'} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@vitest/ui': '*' + c8: '*' + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@vitest/ui': + optional: true + c8: + optional: true + happy-dom: + optional: true + jsdom: + optional: true + dependencies: + '@types/chai': 4.3.1 + '@types/chai-subset': 1.3.3 + '@types/node': 18.0.4 + chai: 4.3.6 + debug: 4.3.4 + local-pkg: 0.4.2 + tinypool: 0.2.3 + tinyspy: 0.3.3 + vite: 3.0.0 + transitivePeerDependencies: + - less + - sass + - stylus + - supports-color + - 
terser + dev: true + + /vue-demi/0.13.4_vue@3.2.37: + resolution: {integrity: sha512-KP4lq9uSz0KZbaqCllRhnxMV3mYRsRWJfdsAhZyt5bV5O1RTpoeDptBRV9NOa/JgOpfaA9ane88VF7OjWNK/DA==} + engines: {node: '>=12'} + hasBin: true + requiresBuild: true + peerDependencies: + '@vue/composition-api': ^1.0.0-rc.1 + vue: ^3.0.0-0 || ^2.6.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + dependencies: + vue: 3.2.37 + + /vue-i18n/9.1.10_vue@3.2.37: + resolution: {integrity: sha512-jpr7gV5KPk4n+sSPdpZT8Qx3XzTcNDWffRlHV/cT2NUyEf+sEgTTmLvnBAibjOFJ0zsUyZlVTAWH5DDnYep+1g==} + engines: {node: '>= 10'} + peerDependencies: + vue: ^3.0.0 + dependencies: + '@intlify/core-base': 9.1.10 + '@intlify/shared': 9.1.10 + '@intlify/vue-devtools': 9.1.10 + '@vue/devtools-api': 6.2.1 + vue: 3.2.37 + + /vue-router/4.1.2_vue@3.2.37: + resolution: {integrity: sha512-5BP1qXFncVRwgV/XnqzsKApdMjQPqWIpoUBdL1ynz8HyLxIX/UDAx7Ql2BjmA5CXT/p61JfZvkpiFWFpaqcfag==} + peerDependencies: + vue: ^3.2.0 + dependencies: + '@vue/devtools-api': 6.2.1 + vue: 3.2.37 + + /vue-tsc/0.38.5_typescript@4.7.4: + resolution: {integrity: sha512-AFlqvwpENOTTJxS1Gl1uG0ZVsvtGCbgv9d0i4fXk5Pnao/ETVwWewPEsj+4cPlwa5htO9WhK6qfWwoqcCqg0yQ==} + hasBin: true + peerDependencies: + typescript: '*' + dependencies: + '@volar/vue-typescript': 0.38.5 + typescript: 4.7.4 + dev: true + + /vue/3.2.37: + resolution: {integrity: sha512-bOKEZxrm8Eh+fveCqS1/NkG/n6aMidsI6hahas7pa0w/l7jkbssJVsRhVDs07IdDq7h9KHswZOgItnwJAgtVtQ==} + dependencies: + '@vue/compiler-dom': 3.2.37 + '@vue/compiler-sfc': 3.2.37 + '@vue/runtime-dom': 3.2.37 + '@vue/server-renderer': 3.2.37_vue@3.2.37 + '@vue/shared': 3.2.37 + + /w3c-hr-time/1.0.2: + resolution: {integrity: sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==} + dependencies: + browser-process-hrtime: 1.0.0 + dev: true + + /w3c-xmlserializer/3.0.0: + resolution: {integrity: 
sha512-3WFqGEgSXIyGhOmAFtlicJNMjEps8b1MG31NCA0/vOF9+nKMUW1ckhi9cnNHmf88Rzw5V+dwIwsm2C7X8k9aQg==} + engines: {node: '>=12'} + dependencies: + xml-name-validator: 4.0.0 + dev: true + + /wbuf/1.7.3: + resolution: {integrity: sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==} + dependencies: + minimalistic-assert: 1.0.1 + dev: true + + /webidl-conversions/4.0.2: + resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} + dev: true + + /webidl-conversions/7.0.0: + resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} + engines: {node: '>=12'} + dev: true + + /webpack-sources/3.2.3: + resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==} + engines: {node: '>=10.13.0'} + dev: true + + /webpack-virtual-modules/0.4.4: + resolution: {integrity: sha512-h9atBP/bsZohWpHnr+2sic8Iecb60GxftXsWNLLLSqewgIsGzByd2gcIID4nXcG+3tNe4GQG3dLcff3kXupdRA==} + dev: true + + /whatwg-encoding/2.0.0: + resolution: {integrity: sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==} + engines: {node: '>=12'} + dependencies: + iconv-lite: 0.6.3 + dev: true + + /whatwg-mimetype/3.0.0: + resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==} + engines: {node: '>=12'} + dev: true + + /whatwg-url/11.0.0: + resolution: {integrity: sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==} + engines: {node: '>=12'} + dependencies: + tr46: 3.0.0 + webidl-conversions: 7.0.0 + dev: true + + /whatwg-url/7.1.0: + resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} + dependencies: + lodash.sortby: 4.7.0 + tr46: 1.0.1 + webidl-conversions: 4.0.2 + dev: true + + 
/which-boxed-primitive/1.0.2: + resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} + dependencies: + is-bigint: 1.0.4 + is-boolean-object: 1.1.2 + is-number-object: 1.0.7 + is-string: 1.0.7 + is-symbol: 1.0.4 + dev: true + + /which/2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /word-wrap/1.2.3: + resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + engines: {node: '>=0.10.0'} + dev: true + + /workbox-background-sync/6.5.3: + resolution: {integrity: sha512-0DD/V05FAcek6tWv9XYj2w5T/plxhDSpclIcAGjA/b7t/6PdaRkQ7ZgtAX6Q/L7kV7wZ8uYRJUoH11VjNipMZw==} + dependencies: + idb: 6.1.5 + workbox-core: 6.5.3 + dev: true + + /workbox-broadcast-update/6.5.3: + resolution: {integrity: sha512-4AwCIA5DiDrYhlN+Miv/fp5T3/whNmSL+KqhTwRBTZIL6pvTgE4lVuRzAt1JltmqyMcQ3SEfCdfxczuI4kwFQg==} + dependencies: + workbox-core: 6.5.3 + dev: true + + /workbox-build/6.5.3: + resolution: {integrity: sha512-8JNHHS7u13nhwIYCDea9MNXBNPHXCs5KDZPKI/ZNTr3f4sMGoD7hgFGecbyjX1gw4z6e9bMpMsOEJNyH5htA/w==} + engines: {node: '>=10.0.0'} + dependencies: + '@apideck/better-ajv-errors': 0.3.6_ajv@8.11.0 + '@babel/core': 7.18.6 + '@babel/preset-env': 7.18.6_@babel+core@7.18.6 + '@babel/runtime': 7.18.6 + '@rollup/plugin-babel': 5.3.1_3crms4j33zkfeqv7ozcuia3hfq + '@rollup/plugin-node-resolve': 11.2.1_rollup@2.77.0 + '@rollup/plugin-replace': 2.4.2_rollup@2.77.0 + '@surma/rollup-plugin-off-main-thread': 2.2.3 + ajv: 8.11.0 + common-tags: 1.8.2 + fast-json-stable-stringify: 2.1.0 + fs-extra: 9.1.0 + glob: 7.2.3 + lodash: 4.17.21 + pretty-bytes: 5.6.0 + rollup: 2.77.0 + rollup-plugin-terser: 7.0.2_rollup@2.77.0 + source-map: 0.8.0-beta.0 + stringify-object: 3.3.0 + strip-comments: 2.0.1 + tempy: 0.6.0 + upath: 1.2.0 
+ workbox-background-sync: 6.5.3 + workbox-broadcast-update: 6.5.3 + workbox-cacheable-response: 6.5.3 + workbox-core: 6.5.3 + workbox-expiration: 6.5.3 + workbox-google-analytics: 6.5.3 + workbox-navigation-preload: 6.5.3 + workbox-precaching: 6.5.3 + workbox-range-requests: 6.5.3 + workbox-recipes: 6.5.3 + workbox-routing: 6.5.3 + workbox-strategies: 6.5.3 + workbox-streams: 6.5.3 + workbox-sw: 6.5.3 + workbox-window: 6.5.3 + transitivePeerDependencies: + - '@types/babel__core' + - supports-color + dev: true + + /workbox-cacheable-response/6.5.3: + resolution: {integrity: sha512-6JE/Zm05hNasHzzAGKDkqqgYtZZL2H06ic2GxuRLStA4S/rHUfm2mnLFFXuHAaGR1XuuYyVCEey1M6H3PdZ7SQ==} + dependencies: + workbox-core: 6.5.3 + dev: true + + /workbox-core/6.5.3: + resolution: {integrity: sha512-Bb9ey5n/M9x+l3fBTlLpHt9ASTzgSGj6vxni7pY72ilB/Pb3XtN+cZ9yueboVhD5+9cNQrC9n/E1fSrqWsUz7Q==} + + /workbox-expiration/6.5.3: + resolution: {integrity: sha512-jzYopYR1zD04ZMdlbn/R2Ik6ixiXbi15c9iX5H8CTi6RPDz7uhvMLZPKEndZTpfgmUk8mdmT9Vx/AhbuCl5Sqw==} + dependencies: + idb: 6.1.5 + workbox-core: 6.5.3 + dev: true + + /workbox-google-analytics/6.5.3: + resolution: {integrity: sha512-3GLCHotz5umoRSb4aNQeTbILETcrTVEozSfLhHSBaegHs1PnqCmN0zbIy2TjTpph2AGXiNwDrWGF0AN+UgDNTw==} + dependencies: + workbox-background-sync: 6.5.3 + workbox-core: 6.5.3 + workbox-routing: 6.5.3 + workbox-strategies: 6.5.3 + dev: true + + /workbox-navigation-preload/6.5.3: + resolution: {integrity: sha512-bK1gDFTc5iu6lH3UQ07QVo+0ovErhRNGvJJO/1ngknT0UQ702nmOUhoN9qE5mhuQSrnK+cqu7O7xeaJ+Rd9Tmg==} + dependencies: + workbox-core: 6.5.3 + dev: true + + /workbox-precaching/6.5.3: + resolution: {integrity: sha512-sjNfgNLSsRX5zcc63H/ar/hCf+T19fRtTqvWh795gdpghWb5xsfEkecXEvZ8biEi1QD7X/ljtHphdaPvXDygMQ==} + dependencies: + workbox-core: 6.5.3 + workbox-routing: 6.5.3 + workbox-strategies: 6.5.3 + dev: true + + /workbox-range-requests/6.5.3: + resolution: {integrity: 
sha512-pGCP80Bpn/0Q0MQsfETSfmtXsQcu3M2QCJwSFuJ6cDp8s2XmbUXkzbuQhCUzKR86ZH2Vex/VUjb2UaZBGamijA==} + dependencies: + workbox-core: 6.5.3 + dev: true + + /workbox-recipes/6.5.3: + resolution: {integrity: sha512-IcgiKYmbGiDvvf3PMSEtmwqxwfQ5zwI7OZPio3GWu4PfehA8jI8JHI3KZj+PCfRiUPZhjQHJ3v1HbNs+SiSkig==} + dependencies: + workbox-cacheable-response: 6.5.3 + workbox-core: 6.5.3 + workbox-expiration: 6.5.3 + workbox-precaching: 6.5.3 + workbox-routing: 6.5.3 + workbox-strategies: 6.5.3 + dev: true + + /workbox-routing/6.5.3: + resolution: {integrity: sha512-DFjxcuRAJjjt4T34RbMm3MCn+xnd36UT/2RfPRfa8VWJGItGJIn7tG+GwVTdHmvE54i/QmVTJepyAGWtoLPTmg==} + dependencies: + workbox-core: 6.5.3 + dev: true + + /workbox-strategies/6.5.3: + resolution: {integrity: sha512-MgmGRrDVXs7rtSCcetZgkSZyMpRGw8HqL2aguszOc3nUmzGZsT238z/NN9ZouCxSzDu3PQ3ZSKmovAacaIhu1w==} + dependencies: + workbox-core: 6.5.3 + dev: true + + /workbox-streams/6.5.3: + resolution: {integrity: sha512-vN4Qi8o+b7zj1FDVNZ+PlmAcy1sBoV7SC956uhqYvZ9Sg1fViSbOpydULOssVJ4tOyKRifH/eoi6h99d+sJ33w==} + dependencies: + workbox-core: 6.5.3 + workbox-routing: 6.5.3 + dev: true + + /workbox-sw/6.5.3: + resolution: {integrity: sha512-BQBzm092w+NqdIEF2yhl32dERt9j9MDGUTa2Eaa+o3YKL4Qqw55W9yQC6f44FdAHdAJrJvp0t+HVrfh8AiGj8A==} + dev: true + + /workbox-window/6.5.3: + resolution: {integrity: sha512-GnJbx1kcKXDtoJBVZs/P7ddP0Yt52NNy4nocjBpYPiRhMqTpJCNrSL+fGHZ/i/oP6p/vhE8II0sA6AZGKGnssw==} + dependencies: + '@types/trusted-types': 2.0.2 + workbox-core: 6.5.3 + + /workbox/0.0.0: + resolution: {integrity: sha512-/I5JIWHK18DDGGoXOZgOkOFLsiMQZlHLnwEzw328ZMU4TA21yFCw11YEnZ1CsOnSazl0DMbFmLJ6q5hpQhP/aQ==} + dependencies: + babel-runtime: 6.26.0 + dev: false + + /wrap-ansi/7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + dev: true + + /wrappy/1.0.2: + resolution: 
{integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + dev: true + + /ws/8.8.0: + resolution: {integrity: sha512-JDAgSYQ1ksuwqfChJusw1LSJ8BizJ2e/vVu5Lxjq3YvNJNlROv1ui4i+c/kUUrPheBvQl4c5UbERhTwKa6QBJQ==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + dev: true + + /xml-name-validator/4.0.0: + resolution: {integrity: sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==} + engines: {node: '>=12'} + dev: true + + /xmlchars/2.2.0: + resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} + dev: true + + /xtend/4.0.2: + resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} + engines: {node: '>=0.4'} + dev: false + + /y18n/5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + dev: true + + /yaml-eslint-parser/0.3.2: + resolution: {integrity: sha512-32kYO6kJUuZzqte82t4M/gB6/+11WAuHiEnK7FreMo20xsCKPeFH5tDBU7iWxR7zeJpNnMXfJyXwne48D0hGrg==} + dependencies: + eslint-visitor-keys: 1.3.0 + lodash: 4.17.21 + yaml: 1.10.2 + dev: true + + /yaml/1.10.2: + resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} + engines: {node: '>= 6'} + + /yaml/2.1.1: + resolution: {integrity: sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==} + engines: {node: '>= 14'} + dev: true + + /yargs-parser/21.0.1: + resolution: {integrity: sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==} + engines: {node: '>=12'} + dev: true + + /yargs/17.5.1: + resolution: {integrity: 
sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==} + engines: {node: '>=12'} + dependencies: + cliui: 7.0.4 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.0.1 + dev: true + + /yocto-queue/0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + dev: true diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000..33ad091 --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} diff --git a/frontend/public/apple-touch-icon.png b/frontend/public/apple-touch-icon.png new file mode 100644 index 0000000000000000000000000000000000000000..bd3f26c1152e1ee41a00ed407d1587edd9e87b91 GIT binary patch literal 7911 zcmWkz1ymGW7@Z|0rAq-p5NUx$8W!np5S9=mmXwxe6-fo8OJHd!sijk-a{*}tX^@gm z;h%rb*`0It%+7i5d+*-w-uoi8HI+yR83+LYAW>14*9GtB|3CNez;CHxJVEdV@z7Oz z0el;IxD5bIIV$q9`ab4+nMC>;v&_B0B9=XH%EBX_*xQrTu17tOH4<9VJK2QvazC)9 zNk%z@sr>n~PE8%Q%{-vZeZ*y^hoDnUl-0=9J-JV^{m(*zq(AUW;C|zImj8jYqp{-@ z`uxpZ(9+W8Zq4Z8=Ke6mog9}+84`ol{o;krJzTPQ4fJCc*hF%SzI7fJ4kFSSUZNJH}> zYC(4_sGfb2diamTx2mth0bbV04|EEwfia8qg8x?9b|G96IG33RFwf_3c~B3)5m-I1plZQ8)9Iw8Enyl)&L{P)jo^1%c-Cqz(OG`8MxI9|-_{vDIC`myqCpqEe<&`-EUST8; zrbOM>l(Eo0?ro*K*s1C7qysMg3d_jI5EZVx%3~y-bHK&J%Muh4npnX{6Cbe(5G3V> z8{(l9$XKZmkz?%GKX4HA5qu#}2rzho4rpsq+NbB;r93U zw?)`TKi_y~r2_18k0FxBDM&Xq;eu<#g(kZ6>EN)URVQne6GY^?&sW zrFMO`XJ52>v_ANESY!z$a7jO>_`wL>9&mXC;pOF><9}3JTN_RJME&L6#o_8B6|CX9 z!}@9n38RGfm$ZzG>x{a(IxaK12M-?Pyjg1VS5;Fp+nOWO)#~o(_-*R#UB_uQk4U<8 zb#jW%%>10#Waa zpNwRTjG4<@TQep8J&urHu01_HwAIzsg>_$Ww1^NbhO;^*ImnHPynE?&$AL#{k=ad6 z;(I(dr_&a96B82|#ugUAk#*$K)RRAd^0+Lv`0A^vm2#Uk)AE@j1cik&AwZFd6B%kd zs=wOrJ<>2XCT8TVr|0fjkHyL7p62?&fdPgT9^)gq7cXAiOAwf!pTDD{qFS3MrESH( 
z_a`$XB;>%y&+oP<$%5(~4Ycob_IG@9nzW%Wo=ya&FAq;nZiYuk4;;~K>YIN5u7q*$ z@Omz19+m4a)Ka+HKbE5}<=!>TTFXi;~Y7*MpgVS1D18HT?+QH_7)Ei=i_zPE*Q0Z@v!?~cjQK@HA!%u|L+R=1 zY)#KirX82s{;iaimGv$E2%mUPOGfs2vo@$zg|b742W4O1C$E6FUr0kqhzn)54-Fjr zE+8zdA?|-xR?yemjC3K7HWG$HW0F!*Mwe3zhoEkro}PmHOYJf-e60KyZSuFSr6nbj z^D`*Xu)--pThpJP1oc(Z-YHT9r6nhOdynUvv-BumHSj|@J2^=< zn!5b%c!#|$NFro6Lae5$dPKImu01(5<*nu9RQA}6YZ^l!~AJ&1k%$ z=md730owO!sV&g%iB{GPr`hFWR@U)kJ~Id2o|2ra?;-e46eGF3D+H6e`H zAOPw9?Ct#v4-Hj#D}JiO$iY!G^Y!afeBdj)6x58GzL?`Xgc{i=040=FQtB1r=H~vq ze{s4ooKb1jO{l~aTb~`n_CJd0!Kix!{5{e-D=8=JQ9WW*ez zA7dJMf_Z~}Kt@OBvotZ0Fh%RL73wsHwnq?>wLY}6*x1-8wj249_TH?j;C)8Md@0BU zCje3$^8Ejz)UfVEzAYj{!EtVHjz?pQ3JXoV_7^VY`W1LbAt;-_>9vm2<7sUZgy7P8 zf=lZql$4m*k*|_ax6mHEq;`+n9S5zv5=!$5f?4?$$J$9NzJ~Z!9Tyj834_6s8ym&y zawE>;SFBE=PtF0?d%dt`?>!pnpc^0NU;KBZ%+d>F)YJt@@$tHf$R~0!^~5KCCQAoF z5*|8_@8YtTEHj)D?Pcjyz^XpZ3JQ~=Vq!XJZP#pE zTqQnZxeAP-M3*cZ8X6ji^uGsQJ5xFQJ6~@akif8ob);sM$_ieNr1Nysq0M0O-reP9 zHlyDO%#qq>I!`GUWzinw0#wS$YfR_n=I;HwI{o1Y00V=Amf$F4{m=K!zua894k*h#4{0*dO{+i>xysR_Io{Vo(TzE3$e4?eEMAF+}xkQVbSKgj5UV` zxgHo8C^2bp&CxJ4G(3(eq2lX?n8;W1ynLDSne<8l0f|oT%*)Lk;W0r+OnI7^nwqYI z%i4nt%L1muyL)=ft7~fw@+eJ|mHRaA-@gyl41Q514N1?{cH&p0MLZJ_5bIh!Y%?(E zwulS5IB>lRVIryQzk&zMy@hh{*$w^d21#?z*b~{BLD}+Ig z=Q(Ww2^9FGrK+l$EzP)hKjy80fx-8=TF1BGJ08eRd#h@6_xE$A4z7ROohrA1lMx@| z-)F+{_rE3wK7Z7S2BKMLGCoFOLtnz-n_a!VJ2{UTUHPCy@hMOkn)c5_ac(L)!j#(K516I&jJ=GiMA@*t_C=v zP|w(J-}ve@2?z*k&CSi_+&w&&;CRy$6FaM$n?|o*zYb|-@CBDhQAfhkhwWPiiT(lEW(bwl2Xw~ssOe>=l#Gbdu!`8xVibQp5v(B>7Ux?LPA2XFpqAQ z)|q62ZnE<7R#QNA;EzmW#)1~Ua^k%u-D3X7^C771hs^ttQlpC7q`~#JYj)ut_eHe* z1ff>8v=k2T2R>A){zX(EqjzFMb+>prsc&d$zf`bHWW&Y)s+TT^1T9G!X>9QJo_x9$pg}?oV zBJji__~znJ58U2i4bN9rRv21aTBKWp@1!S_zSn7xq1+Vg?0pse5CDRijLfy?$B);9 z`<8M>-%N|=t8G5c?xd#&x3sqKX?_un8?A7~Ej5C}iA?#)+mxoLnKlw&8lqac zV-HIrbP@|?)FH!ZGQqcVU?;dKD=Z9~RFet3qI&r7;fn_h3|UI@@)x9ZNL!$|ozhTW zA4Nhp4Labfu8ldog`u+BD9L1 z=q&F87apVWi-g1?beu^XuvLOz+eO(yP4$*P@H7_gfBf+LhutZ8;O~>;Q5o7d3k~Gu 
zP{Y;6OIiCdDQG#~%yc;=N`TV-%^PMKDk}8*{VGTO47nm>Uq8RFyQ4vV&&@2j2s;a^ zSn+YIyCAj+rgiwQ)ac1)>?fLE5DXL)?pAAkap?O!`SFW8(-n8C`};1Z*9YxI+MN8k z#33-DFD!WP9i3m;!h{|w)rd*!3XwvA0PuhDyZigMX>3*()3!c7K4$|-++zzK{rG#@ zmQkN_x>fE|hjv$KHGgA6;>pSV_&YSDULV7#unU7tIHOaotgNbcMrH0yH7Dff9f;2` z#J=*=#Ag+4+t0g3{3sv{0P+8i?CtLUtqae&*tQP{kQ~aCs6RFGTPZ0Og?-e1krVbX zz$%Ff^UA?t07Tn@JT-M$s9fBuI;X6lf4j3a_Yj4Apo3H$m?34tykNFFxI6sF-&RRL zL?q#0ZQb>BOiWBnV-QsTpR=-<22tKP2)ypZ0!~SkfaKoK;sixdPNtWa?`d;QO4B*t zqYDCwjkckoLA?xAL4{T+R`gg{lO-4=iuNVc*J+K(*3azM7#tQE$u45 z7ZGh{D@;gIwnv%uC5|iHh;3Fbug;Ihtm$O=n^`kXww=10EjB&8Jt)u%1_Q3d8Y+)< zxB7<7QQPv5^J5mUc~|gKpqQ<>MUNISQ_PC zVIK13I#C5|#83r(9IDqa(PDrExZ=>tl617UzvRq}0A>CSsB&?fJ-9|j%RUGXKgHZU zuzCdd>~ho#pM(jKWuuWUZyX%b6XWCG2NUxGS1zk20RcC(#G_J!+iE#J#vHJZMN7?d z4=YuhKZnIsky#%8%OVc!tEi|rAOx;c-9uv;IZrSYCrm$}|947qg)u`*Aw;?a5paho zQ!7Y+58?IkkwX@~54lP4#YZ3ZZDVlKj+oyq<3~Tc#p$g09QExvEO1z-~0lA(mMt=oSy5S@^eR zzuQIE7LO{hPoQ&lHf_GfPU~jE*E*S^U)a>p@N5M%5i&rBbAK%>d@za>8l^S%+Ud!? z*EDp2e4%&tEOKA0E>ubt3Y4Go+9cap`d|T|AF82!h;Zv9&LQFZny78&z+W{<8 zx0?k;v2BJ-gTO%ZUBh9UMHVjXBQ4Tus*r$v*dZ!E=K{u!iue@P^;JMYF7~pY=J1^% zTeL#giy7+JN0k7CtD>ou*_) zC>TX_aTpOk<-jm|_6(e zs6@Qk@fgnW9<8s-La@-1){z&=#*hL$6F26T#AC2&BrzaD2cHlq`?c3k{QRk3UstD? 
zQPnY7YIN5>G!$@HaSQtSy>MdsaWykD#-pPn23=iUOmZ~|>(Rv6)YKWJ{$@|~gU@ks zhjai#==8EH$jpl#1Bsl>1lQE&#>U2Grlt#bPcM+^*Ikp7wU-^8si~#} zpQ00SG4>M0=r;X(wOUyv?;9_Yxb_|Y+*-iAz zKz@FHuqa@l!77ZKD(PF@l$B-f;WBTIi;Ei#I%f43hd+P* z{5>r)Z3TRl)qFrlH^XDmCaDRBXLx{Kaz{WwU;;py%EM?tQ?4>KH5CRjR^k6vHkv|V za@*S8p1uaOXQkeIIB%%@z$6B~zrT;Ti()00_M{WC9V#Er5Xr;d_&z-?ayVX~<^iBc zIp~i!hW(e<)_&F&7Z>+0EIeck@<~uj-Yhd6NuByJ09LIop-5bMru(_FH>*Df_0gPu>*Vdr80GBoZE@pdS zK>_d9cmYvrM#g~{*aHN|T6teu+t{$@S-UqbJOMSKDojF5tb2E%aiS#f`V6afu&=K{ z*s>Fc3#K*G>Mt_d-Cb~rs1L!vzX9egc^QX`@Kq|jKcGSLtpfeSx9I3-jXnMOSy@V9 z)al8IdgqTHLWSySSYu;jA~gKwpGZ!+A}NFK5fIEq#T2c&E-oWS)YR3~{H4AA=zE4_ zz2P;?_^kgvBxJ&<+@za!Ybofue0+9xHa|D_eK$-`nS$Bm3I`vbquiv{K_6^*A=%kW zGY`Ohg#FtSw8=WY*4EbeOtIli7BqlsSZC%OG5TE&ixVZpll!g2$-z=^@w<25`;rq% zPqe;>T7IC|J~+7J0u?P&EnG+E@af~nkLi$lENirPU&tRTIt`7C+#b%>*kKwy>>A!4 zm`5ihOuPpZ!`DB;f}9Y59ss)$vx2-lq9x$6`upUhaBErF?z`e*dke6U zkIBl)rs(PEIm_=hpVMspFM;rIaPpHA6I%jR6T$C+;o*PV;Px&MO~>$Z*D^5xb0m!7 z(36VxTlc_=zbr3bzQkGI*!TtkGEPoTK&R51m!o71no?NQ{J{8j&itP>Gb5vVV*>-~ zjiI#TEif|p_w2*&{^sU+Mr>@Xy|y-{lu63Zv$C$v(Ch7688S-B8nPdx$`29W8MW&{ zfEn<2P-1rVWStYXyPwe{a5i8MMmBWfc{hx+ms^Fh>Um z(pC4ywByau1L@$~o5x@P8+Lej*z~_kuyA#K)+AuA^GWUqM&$jW223}{KY#u#jksrM zVnPV!IU13Xkw#!H5(2u73{qgr20n92v(+2REaL0$pR&HW*&bNDe&}NfAaBQ-T=B8T z@tRh_N_c>CgRcEu{_)DrPQ(rr5R13Bw~F)iZ-Sewx}&}WFt21-Aq)&h?_5D0`w032 z1v;8Cuoimy`sgz-irdNrGOcT~9q`x}<%rzmEi9J)U*Ml(V^tDGb5} z-um*gngeY6jX@xCfO7nI3zeYonzn$D5I2ZXQ|92{;F(5`&Az$0Ii{SPoMIvx z9%PPO_+r!BgDjR%+IibxYXZ+fJ}bq4*XK(s%gX|b-@oU)2H8+wC)P~Yl$iV1uST%W zS^CIZP^M;pFuXxmR{q`NYyer41uU4#db~IW6jr|i0s{{knwlKCx_gDy>Fz>RHHLS@ z9UUE0c6WD0S{afyiSc==U0#j4Kf^_DM^bX=+Ky&LZwxP=?adD#FJ%YcHmZViS?-H_ zJnH1^JjSD$+`&^9r9*{zFTG^@yMXNKbmv?lF!0~s@$qpsU~2~SO^1u%HJHNB9A^F{ zFfcH9zR8Q*Pk_pk^OTMua(tw_o81AdFX^wQ)>$ej8{1k{csRcJ_%8pa`IokGq#(ax zgEUqJ>W^@+tPt_M2WDVcy46AjRU1cug18BVLXH<3<|1kGRUA?0d-LaA!^6BFF>kN8 zum5j-rl+RxusY@y%IfOC$prF3q58q+`@cqGK*$tSRz3^tk7q9hQx_;$AI!um0(8S7 z`p7o_bC(}Cm&aNg>+7wV?Mixa!otFE2#W4;J^G_+Yq(;T0|F6{lAg{`Tv|F`)zriw 
zm74&*5t{MmaTB|VG33|u($d{eupXzB6cp(oZeJUMTT7A>6zAe1qN1W!;BMpsWg>wo z-&?*U1)3+w4J8;zZjm)LHC3N!o3}wiRUsuM#Rx{v26)^`FlQzw8@AdGGZcYn>d}Uz zTcm+R)dM!%IB>I%7HV)RD)b-=1yvF2MoVwn|DDbKEY`b5DS5wo^_vZ7h*%cka;UPO zC|qj`ynf!E#w_KRIM6wOFBh}5wKe=S4fG2Eo_lk`Toc~5OiWB1C#RQ**96Iqkt52sGD_>bIN?bY}BQIbHS&C^smVsp}*uz(3ks$;5 z0K`OfHhDNi{du{nif-GmHehloA7}GId47cx20T@>V%WIlAz{>yU_v}k1vuc~8DI5eHhQEIJoya6 zHNf+53Ocov!8jSl3bqoG|y3N@!=ybgJWkqAqb{{SM`ZA)h=_wtx_L|Kq_nc^{`L)K|fBO}g~T?q(bAD{DQtAB1-6MBC z@M0*gyn~HxxBXzAS^78o%p-x}3m*J!D>hQZrISpA_2oJF(B*f+yF2ih`#6w->M8{|DQ3DPx%K}keGR5(v%Q%g@1Q4~I>N$D(LVNu$~;E>pz)6&Gaz(nmv9%Z5K8U>TG!lWA( z82kY)O(Bt$BwZR}9Htx7MT1cyx*;i8Sb)(|=!!N$Ot7)jIp#j*QN5YW+&lL?zH`3s zAjpLXKx|J42*mfuj{MCpY*Xip$ZYvT;=X(9xbQ$C8ejIAxrq?+f*aY(n~(1>R5OwH1Oih>T2@Q z)KnP&TAp_B#Jtlisw6>K)7QdxlF6LU=L@h~t3$sF1!EzT$s9QdZ1%dj;dm5X*TeC6 zJTa&az6L_7)eawSeB9X0%*-4CkOu*?=WRqlV%&2|6#cCpE*6XR#igY>`}25u`f_S= zQe_Abpe2zylzN20h`^qel~*@TPk%NOiQ(IpMYyxG{h_b7cVKq*$rl3nBl;&wW?~YV z8HH{rDGu2iM`uE#{|HGq=W@9rRaMVBrs)qTN~l_`e(MkRhx|SM^L#$vn14Fo_v`E| zvbp)Wc5`Ip%Dwv!zOrxv0Rb3>acjF=-nyp6q6|}|QmJa1b{7B+-mkBJADbAz4g?qL z$H!Y2g2CZ*I$aPd>e9F%6Or7CT!lHN&bUbx*<`5->AJaqs7002ovPDHLkV1jnlF!TTb literal 0 HcmV?d00001 diff --git a/frontend/public/favicon-32x32.png b/frontend/public/favicon-32x32.png new file mode 100644 index 0000000000000000000000000000000000000000..a66e4d4300e3c51e4ba3c7f589d67c15941cafb8 GIT binary patch literal 1417 zcmV;41$O$0P)Px)LrFwIR9Hu)S9?fXR~$aKJ~|gC5ofhp5?6Ywb2`GD!%G!ng;-59Z!&BqU4w+j8rfK{7EKIBRJ2=5CkAN{mj%c{pooL3TTH5gp8JM?#wY@ zkQgYA*i0Rd06|fxl~Y4a8D!>ojg1|JZfxgjKAG~+YYnW(9jUC*XwseLqiak>#x6uhlj7% z*4COJ0ItNcY{+d;1OVK}jo4^3%2HEPKaonMMR5MY!ou~8jEt{gu!Y5?s`T`C@4)`; z?XA(dx%pA0vT_~)z{zHpm6Jg9dn_k5P8emOp&{XyvvZ8ld{(RN+k}LKLb%#$wSFg& zNUp-%PELNG>u@-NmDiN>0Hl0^Q87-Z^B_Gv z{dEXnX=!O+EEbD?cR2P=MMp~vLxx;srE;F6z_Yc07KJMzp&Rjp(I~?(Y}R72e2Jbv z-vBo@nM}DyZa4TaFV6(yTP&6@l9G}>GZ+kc*Obb6oTULa3R)o&^rJ@u`4~3a+t(+n zsi`qTt8Hy>^&Yv8a1=#V(4zhQ{co#Os$U=grIN}3%c!)iSe1|mh6e!501URfyK9b% 
zi<8;yb}In5J2v+8*}c6zb9{XKJ20JwK@tGsJkHJOs{_o+&bXyK6y)b)v(R>Cv$_V&kNVPT%a^X%F47aC3T znXay`-+TagDDbC)yA-o|peXQ;S*_Lyi9~|IfHs?LQYw{RAuo2H1`xzat0i8s^VR|X zT^a8T4FCiI(FPA{QC)t%hY8-opier(^>Nkk-u*nhcb43IR&{l~!D)YSAAilWJpk&z(^ zg<^E{-l(edqp}`#of<7tC~mHYv8P$GeM*i(yH2kByDhmXwtAmX(#ADK9U7 zPcD~#)7jZ6K~c2j?%lgLv$C?pDJd!SLqkKl!NI{Dy6m_pI59nKT3B3MDsOFVwMeKa@sgnoPm05WY_t@h21j*fLWQKQi$>2$gc-?Lu7QD0Yg zdVhc4ot&H;u4&P1w|(B0)YjIvfnnIIm6eqrtgWs6Y}>ItEi6=A=xGBY#t z{pIE5MZI4C2PG$>3{j1XIDcic=8zrCiGiI>X-OD>kfQ(R7=%D&z|S7sv5%{5uGLKM zlTPNaA>&(+sxkuf_*6}@z)=_Ypc4S--lzXa;N_d+gnTi)Q-uNe#|xP0fD?GOWv}}W XB~~~^+pL1t00000NkvXXu0mjfp*N}~ literal 0 HcmV?d00001 diff --git a/frontend/public/favicon.ico b/frontend/public/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..ad8cd358ae4215e9e109d386c3206e6853530c8c GIT binary patch literal 15406 zcmeHN3vdJkY2E)WQWLQ)^{^FSaAwxL5U|4o6wo@)bv zA;2I65z>yveG6JMh|cydol-GDCniqrS+cq04<#if52mH9488Hj8-L>5 z%)eyGl2AcGLEnuV^J9R!b@}q;y>iy)4BEJHW6bd3!@KX?wX;uAaq-PFXU^ zH@w{_LZGA|C}e(NB|QrL2u++gv1@&O-Iw+C^=C$nO6ZoIojnMlQ8FijIo?W5jl~^R zSWp#De?55;y4;gRAFIZ=-j!pg|)Tk%g$pz zclPWi&TnHykQFVhmL&?~d^>VquBoX>JazKa zxVgEdxuRnG4Q1u!p{Ay$mVbZz@e+zx`5~xS;$bBi==$obuZ~!@bXolD*|UF5JM{P! 
zD^}1s)0O)vd(9*iY{`}ggT0Z96R@9JY@)Gb7e#`Y+tO4n86d61FaMZTv_#GH5@`Qd zHQA;%OLUzHVUNWg6cPl#TcpCQ_*UF1i-|LJ6X@_8218ZhLa>~XF_Aqw5uXmq#$ntl z2ZS&i6iatMeqmspMaty%0dWTUvxHNJYl}*$r!q}u;;nwo}-_V*hr@HW7@Q7*N+`Lw(o)k3vQ%*xTgush!G=t;Jy|Iz2EpJ z#{aCebW8m3_~G5BOqoLWytrqdeHKpz_7XY#t^-MeV|^y$~ko-=#!?%lf&;`%;p&Du4$ZQZ(c-(7dz zc|CK9R(3bcg!|}kaGy)qv}w~{$&s6vJB46LNmB>C`|i8zaQ~(2eIP$Se>s}YB;#KE zG_|8gkM55e|4j1IrAu!pEiFwSIdWuQT%&J#`|Y>W9)JAtUy@I5F3HJZnMLD(=p&}f zcAVtah-`y@33(mY0giUmL|7LiTNc4hF{zo+Qm~Evn*vP?~o6Bjl^IN{-$B&n^SR3RmM8{!k*2w(M)517s zxrWwTzIb~vmUwSin(@M7csUf@^c3d9y~aW zb91|Y|NiNmN8}I6@n>6DYx9PgrQ+(e#~+|mr%oNDym$_+{-UO)ie%Uhh8CH-`Gey8*K07Tb)Pr`j1yL!tvHs}@2|^O zUww6&`g`u&xo-y!9N3+9$QQ_V=FFF;z}Z><{mC52?43Nk6jQVE3>}r?nJZ zzSde^262mP+4}YCM_#ya;S%M!bm>x4Vq#)H9#h=!V=iC5j906cmfG6di@4wZ1*DGS zZzr;e+2*vcN3_wYsmsTtrKR1!68nM$|BQ#Vm%RsjB_}5jCqM0wC%|Xrx0!`xsmiJZ zktfo>F1V-Mo1UI7_6)0Kom;jO7POdsQD?A|SF6LV1UH+L#I#&ll*!Vn?vcjBrD*+N zB-jH9mveL_UjBB%V{OgCsA@>Cy}D?4!Xo@C8nX&F{$oATDVJ05=2v)cnDY9;Uopl{ zpsM7DBSb3Oup4Td>TgeMn-Updnb#w`aygNP%V74Yf<*DC5T`}CI!Z877-~ml0gsc} zY3aapSB(n2@aTw#t3xW3{6-c-Rf!_m6`-Ah;)kGQ$!VxCIxYUQ%h*T^Y>$nN?UIs` za*dt~>r_2p`w8co_~gW{nAcSA6f~zv7;0e`&)R6a;n}}8$0Q^qbi>?Wzl@BGK9D=( z^(2V-$Y{(9_M@DH8#iuT6xR0oVb1Xy#CPJ@wzPQOzYTMs`FI!Y#ly8|(V~a&&N_m5 z7A{;kcw>Hkve41-`Sa&LjCaK)>(;G%IWI5o7i7=O%p8h&+F2Zf+J<7zvl#F7lTceP zymR%;T$P!Kxu*ie(Hv6%^I`Yw+_`fD-h0LX(}QE3ZNy^{EG{nodv0Fd4$Li#<~Y2= zPOaQm`2_1(Sy{Kf_uhNzoi{!{K7@I%&vSBeQZNrK=9Ou!74xjw95Z?H!I6th@kgx-@SjVzB-@3~Q^)*#Fd1PYnY{9_xsW zE-Ncri9PF)M;^I_>^NrkA2@Je7mm>wIv*M9Y~}cy_9;`I?v8P}iOxgJZPOCII4|j) z8u`S0IsDylyl=&L$KqY{x6DEF@R;Ye=G}s{E)Y#?!tBR6-y42Wr)3M$yf}{a9XS7c zk`3d2E%B~hyY?1Z8|K*md20-pp{nvC*%iPL+KSi1y6#ztb8@|dgn5{hU&l9 z&Om8#+9juM*Jb}1zb9lDT%T^l^}oNqCmZ_0g~?%l!YWMi!v45F4Mct7+R!0&`X8d0 z4I4J3<65NFSV*RImc@$~C*U6aJm&jeqMc;2nOM?sKgmViF&$Dn-(>xf#o#`h=my2L zJ#^wfCl29Tm@jC@JnmtvEri^#xPQ!n-CzCs*eY2GZuDU`0~xV?e9tq1*8*r>mcD1f zy?hdr={#+!p9$JF&ML|fBGupNlTSX`M|JBPZ2J)})^&BBE302;xB^)YplkiJX3e^z 
zp`k%6#L&DqZ?HANcV}2O@U*{v{b=g!PP<`2{iWvl-wEpn*<7>Pk`IU8dKQ*!uJs#< z^s01O!rBCu;WETA{&=7qNfK{U9H{Ul^d*;99>9&}E;cIzx=W z2=(K-GY)GgV$F(bK6v0j`G+5VxS8uG-<~~t9@Y6({YDBM<1Jrtpd5cI9BaQn#dm+% zEn>AtTyvUn9U71Kmd9}Y5kCdG4pmiEt=6B1RsE_+KlONE440v*ED%)vgn8$kcLtn3 zeOjG6XRv6vYuBzJc!wJNB{n_=HDA;mee12ae#J!2`q^W*k!oXLc?yTs(!q) z-;OmS5-_~cis{j4> z-_PK&r5Z0@yjVSL`m`H4AKp0!;=7J&QNQ&M@6_Ub*tLER^lFVF4b>}>eWHHM^Yq7c z{d2vxk3RaS93Y)Ch;wGUj=@j*=%h)L`l|YMG-qg6+bW@`AK$`_#xth6M^TOV&HgPc zHK_1iTO!v_7gNO4;`%h|`>S78p!Y(T;f*OMD4c<7er7W^t_|N|G~j!~K6d>wDlRVW zdK{-F{*BW;0pEzXQYq9JP*y?`Fs9vV?PM)(U(4$dz**(tp~mL7lr>t#=RJh$Nb0rgy|ve zm)n!HJ+bYn+&izU*VWk88hn!Ygz4E^4QUksHieObq3Y?RtN>0|`z;{v5PqFR%DScf zb+(`3hgOewkFI95S2I_nUVk4oNX|AfyMlG2E*pLEaOp@V%#TH9kg_w%iTTm;;|O?- Gz<&X-7JKsm literal 0 HcmV?d00001 diff --git a/frontend/public/pwa-192x192.png b/frontend/public/pwa-192x192.png new file mode 100644 index 0000000000000000000000000000000000000000..0c76865e7b61206d62f18886ff07995f53eea64d GIT binary patch literal 8867 zcmXY11yEF97k|4fuuFHx(g;d70!s_hC4wL*C`gNhg%)k!J@ZP)kp7X2o!u532i3u18AP6GXxT9hKK3o6$M&f~=1~ZQM-~;Avpsoa! 
z|72Q+AT*?*qG;r8xov@OM6G_(f(~MTkGL6(rV#H8!u@tYUr9mD{z(<{iWf;!i9L)y zSBXdxM60$2;pPO9;i~!~{NQ%me%5va1zO;d|KkN(EnFepi!V!9%^94=l!O?)ZC&AtrVOZE>)ilmD%EbP< zAsXzH2vaS{o)DLEzC-2>TdNGb=4}n_l1ihij;W^Grq?(*)_l*};d3;EgJ|t%`jrr5 zc`=zm3P-g%SFaP3j~@%dhiSa8$CTK0?hyN{L}iYZAcHToZ?6z!?kt0r8Ky|?WKFx!5Mlyh*aX=d z&qp}4_T5ad_8`2!NkR`_AO8GS{pzHT983Jino$U@L}nEXEjWt11%IT2F;GO2!|%A= zky`UHq#26UVtTX%oC-ZKq_;wgXp%#zxS#3h)Mo|1;L~M>J`Ok;*L3FjG zbaC8Jh>o|9Mht?nN->NW5s;keZLKQ|&q8Cq;I|^HAee;M*PGZ^B$QG*P@OH@|9G$< zJX*}YMMg>od(24pJX0fDAim@slTuI~E(LZodI%>b@tL*%-nxh$p3SwfYKp-PZH6rg z9&)ra;ydTOP2#5^I5}DaisV$w_GiPQT8q3FSWJ@FyyIOr5JE61Y5@)U=>T7r z9SV$raGX(7e9x7qr3)eaj-sP1gnb|zQ&hOqgpZZcPpY|}sT$t-n+0nY+&Mip_1<%5 z=AGHPr)6FsrJnYVYXRLuC&S zaUxdv85ZdK8=8eL7(+qMBm|OxK!$;mGNJE?ZrHA6VWHz~851+JM{P|_Fj@B5sjAjv zdc2Z28TiubYA6>c=jp}SS=mS7R}JMGggiV2ru_UR(%(_79{19=o(7V(BDc4NU}oA@ z>l_$SmKsV;%IB{|5mv$1L2L^UELBluDio6uO2RDT`#l&n>#;E!PWBpsqNbry($^n~ zCZrX(O{ozV`yB_5h)&Aq(9Y1<*f_5E<;C$Wo2O47es$y6rNX?Xcp8FnI>6r6$jgA3 zSAS+O&TVdPj+WdLjH~P(7{DhbB~3|BPuFI*&5!+let!NfE-p@>r>7^c#6FXSlvUnO z(tSm1bzvdi6&vE5&IZ|6Obeh52JP!qnKtAZ;Wv1B=`6o`3kS4^kuQlBzw0>&ol-d=J!+4&tqbIZ&I0R_S#{18lEHs28ff|ABc z?qyBZIZX@cv;Fz{(!H#@x|$ohO>UfmJ&*|{WxbwFmv&b%i0PKG{vOP6|Ni~IP`2e4 zI6bF-jCjvobVq|o9OF+PKy4K-d+-Ska zkFVn5hHgSqQ+db}h@DN-%lcqfMCBHl7~!F|p5EZY^73*56>3!c$jC@<&(EKk-K_x( zF9`|XI{2RKFyD-bDm;4l<;D79hOAeyR(KZ+tUZ6-!Oq_7MJFBqSE7(Hy zBV%G>BJ?e-pag`yV83zxb+2%AbTn7=N#E)FhCc(d#5=PsF~~W$mA-cvEvU*xr#ESB+&*s9D}e)kt6eoyYOkWCNRWNZ28JY-*}0 z3oWfAx18J+3A43`xHz-3wY8glXCys)Qy#0#i$d>jLAccp4i16=k7&c$$U5?qNzinI zuTX;&M%19a+|wkSDD3mOBqeEQW@c!zWIXtiqf@oti~SH45qaL`HS4c_-$Gd#r|-Qu zKR*|jY6~?NClWJ>FrjHjoO0fL_G}hm(U`nBRsB@A2d2!L+?OGHa`@?S*Px>wBk8B{ z(f0PGjq9rmb<@Tq)ds0hmhuFTm{^U~a!(Sw zH6;_%gUywdko=ojZ|Ob;nAbZut9U*T>S<{MtNH#?_F4Ld+!Z{!{_bMgtFaNe+tB~$Pv)kCNcDD-7>92 z2M??^5>(=nzEVL^O!rD`YNN=l4wimY`}_F?WJ$TPFAL){xa+3L9GP|&8H%d3s(XUU zD@aI480ryO=8=~E{Ajw)$?#{UobSiq6xfAW)Cp8(6AYlx$I5EX&B0;wLr!F4z0>R3w@`kI8 zqz#FRR;Qs*s9Y%9%m!ZBV4F)oww(Ot*&7N0^$;jKC#R|`JKL(3_hR>CYx3{2i{tIZ 
z=kD$<3HuiJ?%i`3D>kORb?a7R?2TU!X#BJGw(Do7#(w?UCCkA98>g78B2CV~ZUCUM z4D7fAXrV-}uHOxAIAsLp;5>sy)kW~|@OtZ=e>0SS{HRL*h?O<#IZKQrwl$Me3+mx@ zFG1gjchfO2O@&2oBrnw_#v5QO$5L?U@l z_V&=c*Pkaqr#s~4JT5T4b^bZt*teTjf}X{kHzQ@7VjnAFdRPfYi5hHVxNqRjIhFhwCHyIQS%aU(s(QCdw?Ed~QQj zC`BEAa9mICm6?sD@O}SP@i0){bIbHE=yXbihZUR*5wZCw5eY#XjFQevjg5_lmXr-1 znvK2?;0B?4WAa&)KCGk zuRI29dENeKL`rK}2&TgL#1Q_DuCg>SM-y7=+e=MNEg2slPxK7S(Y6QI;rUg1zdAe0 z*g7dTHr88KwsGC4{*lX{ub-zTC+Uf^y3=*Ktl?qFPg5bzy)>}ZXHC8*8~WS0Bur9! zH~IKBLi_sqOa_RCM@BXq9Ve?C-QC?a)fG}wQra%h{u&Ms58o-17V;O%;dQFvwG9uZ zp~NCJwM7i`wS)oK{eI=P*cr!rRAHy1Lt;ipBfZ1O6$MQ2sl05zLG5w;~9*HuxeXjTr%b;6K#y+$IYk#fzYt zRvMr4!*!H;42$@Os*T7OUG41;P7V*#_rareaXPgj&?vfl9jfdAf!y5P2|YYKYV)7V zE5Mzu&X0_no133OdmEHw)JlpfudRuBB4`(hiwq=FKG1uq zR9Izp#|VxM2?^oK8X(=BuCJh=)=e{cLknx&(d|iFURq+Jll8nwUF&*r>?GFz<#e+8 zY3L2!*aMdAjA$=24MZFYg8XrCa6|z$@d_Fdv&vt_eEoX+Fel>Yih4|Be*VgJP!L>{ z;K#IS`}DMIU|R_B+O#zvE&8rBtT5+i)AJW((Dr8dXYc(S@v)+wy}doSudi=n=^Jgb z;sfum?v|F8i8{?bX9rvWFD~l=@=$lgTEX=Y(GDGUY!DO~857g;%sE>Ako_M}r!7p9 z&NJUr1Rv(-3_pMVJfXs}>AjHSgpelBteU!dTRhiYyokMhId14osP5*sIDIM$1g49~ zQbS8?^y72W?F1g(*5AMV7Xf@W4T+R~`0ydn#wJg9_fOLsox6eO_o1n?Ga!S~HMF%e z*p-OZIUse4c^nfZ7%j{wNVOv(^>@bQW##L#87je-zAfK9+R)LI-JCXw1FFjs*^r-qBdvKkt{uJnRO#uM`>z$pQ7K8hg4Swq) zrK@FGJc5G%`~b^8<<{X0Je>w&k3qt5f~?-Qm*()`LJg5f$Av|D3R@<=IO%Ut?K8pf zz%b_K(=x)s!Zm^auJd%AG4-`{jEt-mMMXEXH8k$ud7}az@a5x2j~->Eq@1k3hu^0% z=faAKg{SjCKhQ&(0P`xHrt3yZ?p3IH;Uok|I87ND8yFl&aB>C@UzLLP6>#tOp9HeMC{sl({)oOJUs)Tx<>a(|I$iH#K1_zA1u>5g%&6D3_%|N{XD)<7CCUB5*CT?6#TD?nCdf z5=qQCMpl>T7zADMN84G5OrR9wdzSHc7vjX(mxl3@H`;y( zr{pQ0sB%G z9B@q>IJ>_s=NtTLxu+Xr{74cKorwmwMGDAU+t4-@5`+El6xSk$j^KAGDJkoZHh)!6 zCZI2`PG-lRJb4m*ulxbI=YlA`s1U3>yTkf^C#VLToSYmHM<*xo)b^+UcQj(e&>iM_ zfp%^!XzI1v{_jvvSu;MHc&JJ_gebh_<=+**{69E@r-4h|aq1NhTjW#nC$mV`tl&cF z-j4c$GO#J`x^Jw3mu?3ISpkohesA=>6D0;9s#veXQb0fGsRMmee9WmQh99cqkwcm7 z8;k5%m4=Ge&G$vqT1fahx#K2m*_u4hH! 
zTL96Q54_u86}KL^W24nKcm;4J+C=`XJZ|vV$HM-@H#s2xXw;y}h$Z@i=QBW+o2TF3 z5c(1(aiU}6;}2|rs#U;gHHI%_y+yik3Bh<_y-i9BYcMiC=EK9ofYrZe)yqrBnM%GT zygM8mxe506_IBrnhm%hezEO}2P3yI_z;uJ|3fo1%tl_j5{MEy?$ zp21sIQPEOaUhYhas~Q~94f%cQ{BdKvr{`I8a&j^i`COI;yS-2Q7><}uD{!e-%*@PX z8XnqE|COwaO!Gvuzi-TwE?yDx3c>VosSdOIqIaPAHX-gyiegGi46n*4F1K#x784B( zjfg8+rY7pr(9{$OF8m5O_uK$$p-<2DWg+2oDM7BEEKp5N4c_O^pEGJJD>1jE)_K_2 zKJWp_6bj6N1&2k$Cusf&EhUzrHJzW!1P-;P01+hH*WVuqa)g=4wTqKoHULB;y}g9} zR|-$*?AN)Vd5NHTWSB-%0}4CG41-2HG0@uEiU;q#0_yN;4=J#p+8^^&x7U)i(v=e3 z=tNi{P3wwk8{JoROdmXWaL3BZ3OOB_kS+Hc^m5(w#Ka}1A46isEk@(5JZGB1!b0Ay zmHsqRpwd;OwUYV%l->k7nCBOOrCoethSA)IEdk!l*d+a&F5X6BMv34Lv<6~b(OH4y z7}{hU)2U&j_}XE#5*O#(xa zL-ozu0tr$ZvBAhz{hG>23Ki@-HL%A)Y#qjTSztb>Rw501mGC3H;Wd6YSPBSb7PJ`jMIVukG$e%vl?JFRIA6@v){w3~}Z zQoEIMa@~l9q`cz^Q04d1k+D0lc6+9BLXK83I6x$hNMD`zFk4e*Zm11*k_Io$GHN|>%!!W;rm|4cCuDO{-Nm+R)iZ_r9bN?#kVSunPI1@fT zz90aCU9&BL{{-R7R3Sw4k4h^l6jUZAGXOW-W0Ci}vld|a^5vN|s7(cr?J47HaI#4N zDZ_n%vLb{Z*O1W*Sz-SK1KjaiA68u%FEiH#w(cba@gX<#ndE#97l1Ilkt^F7&8m=j zwe%yw5Yb!bS-Eqv*cBr=I5ec60+OiDUb~i)KN5JHP44}AfVLYssjI822IkD!@Ik_Mn07b5?t6LEj5pgce!?SwxRn;w(Cj*(=JDZ!mAd1@t zYCCpwZqAAb(nW5pH$PwQVbawdxGvPMtgk2i;&)ah`>iS~;+?mTPY8&L4uQ`%!aPNe zjgL1A-n@Bp7UT|X?d>XA8ynB8qh9X=#qU*8ROCPQo|u&7cA@MOLvDq-tEp+k4YcejsTL!*7tbp3=evS z^ghlSDbQtocl%}4GeC50$+yh&3-a?R*s8P z9$|0a_SV+bajQZ=#HVAi*lVx|41xqCjqKXkXlB{yy@P}Ewt<0N#!-iqg1C>IdIqnSbuwVMc+Ta`rdfDNK2Mw;@HgPaYe>8#pQdCxM11ZBd<)!6i z{bACH&+e;^9v(+O{p4HZxw*Lk%`eb1L|cF>Y1-QtRhp3Cfd~KI*7n-P+4075JC)fY6xPQp15a5?#^<9svwP52Z6&2Oi(CFyp#>s48 zg`DeL>t0@7-Za9=j*1bq#d&Hfoy(|$>HYh1(;)8B9Zk?m6@nc>pKM-)CqZRhpW1+7 z^#-0|r|>B-4i#qAPx_~T*Ry-!GXs`4TJhlXFgX;~zRm#y`ShVDcsdS41B0E|Z?94J z-AG@_pH{{WtnR)C$rDI4sR4)D+u7OO9O&yK+8N3XRdRLRD*{3*cR!F`v0zdO8x0)5 ziHL~EdFjKJYkwuBpk*NH7FPS-jeG!s!8=@BTm^WmZ;<>eQxv+jQ224DR7Xb#E(wEJ z;r^FiP<@Keo;}l6Q^RX%ZVuqj8c?UCR#`ZVgI>OTNeo1L49I+r_s4qy)#T`7ne`pl z;yE;+gZ?H+xlzEZhF3vC!o&$k8!KyT>l&LM6ow$&tSl}juprpZ%E(|=S%}oEBWp#o zv9YyHOwf_bZgxg7uE1b09v}hncP6V>Kh@TXf7wX9<1mS2WCZ$fR`7n6%_``-Beh7n 
zq0`LZ%!7mH-9LXi-_g}oV4E!NKP`cQIt|G&6s73aUk?NQmw*ea{&)FT z*R=Xc)YTh8+G80hDV6in(>}plx5|Wtgj@yq_|66OTTGQ8oAN0jeM>_~n7=rJ=yk)e zKnGWhGB^_;VdI~X5m6B~wr}*%pSt|~ z3%K4E6$n3T0p`E61+B3*Kd)a>P{8xp&W`t~jm?WQ0O^ac?DxS5j_B5UhhOD#i<_Gz z_x1E3d{UNDz!KtqC%?T!07z8X;n@Pxz^`+ejn4N+^)FWkLkA2D43q#Iu6MVm>ysd% z>xrfFeAjzzj=7}ceMfl zYqOV|+x$x(9}*Si9FRwDM}mxne*d2H93XiUY>rx^Hl7=^3hv?Z8z^xKaD{i-+1VqY zw9}Fj6aOS9C3$4Y`}-6EyKDwJz~{_7JuQtc1oR@&IvExcmMW zBO~L*#O&-W5@I2!R3Dm|@$1dkPIF93O0wDk!a*x&;Jt^+j{RkMMTMe?jT|dk$6t%Kv7Ue=6rBK z;orhH^l0#@{X@8ZO)%I}6%3icF+lX+eN8f&zocsh^E}tRpFkZxe zuRMDa{ejdBb$oK-@&G08ItQ&M%>l}kEJqL)^NNU6b^rQBODvh@lQ)3X7nYE?t|%$l zjPXA|)Hg9Oh~EZ{`q;(7*ccZWv+`-cy!}`#WDcEXk%dN9STu6Gay0<4%_b%`D#*w8 ztVz$xjw)6RCIdFhCyGH_le(4@=n^2vB?XuD}545(uf&L@Z0xP_DW!5Nh4VKm&B_zKwYu(z&UvaJv)iT2}bA z8?cNgU=+X)n_HyHMqhzu5xtxbWnIZ`!knF*z44P*NL1M^%quI~76*doRmzLi<>e;F z_f8HD4Amv+=`(BgBl%H`5{{+{;^MU~GJpy{faToe;o%VkqZTrVjJ__i68r`1g@I%L z!RA9vm2;=Csqo@=#bN#kAb(cjh>S&+3GuBNGb0Dvgi`7?F6K7{o+S7_V1G~J|xzz?4xj7U^EirW~caxE-fg` z=|eO|mL2jSbJbW^g(xvTk7MLqBxa+&q2E9(l5s(AJH8X0KvMdJ9W0A##F7Ue=|DvM z+7%Uvl_IyoLJdb0Pa!vp9JW>nrXq?==|qN6g26xDP^UlJ>$%}P21+b9dfoVSk+NWB zP%X$(3GI81tVEEnb*O?lRUmyW8a(`KW)2J!uqD`rSCMxfT?@ncX_GX0VDHr<`fakm zNwD*Ubzc(FliRb4Y-JO2V^bGeGZM)t(T@s4kSqPpVQlOIcqJVcZxX-Ua!#EzW8HyG zFwi@un3By&!Az(8sz+~rvZ3nYo0oz~g-A%$>aa37A$J%lg>{xn6SUo`!qY)Nt1G?x|jW)5X_W{$T8xS9IXRI3!7i+Eig=mKyis& zV7&h_bo(1}5DW1mF~LP*rxC_s5zsVT^|f3n8e8c0Z5g~4z=4Tm-r|9T6;k^liHX+I z(7c*@9;pdk$l$|#m8-8Ne{$ys>M&uLDFgraC)R(E!4XKYR47=Kn6OSAkrxT6Oa&j) tJ=sb?|M!a&xe)voILlUgwlUWCL2&R-IE1_e ze(bvT9f2P(2UWR;P(cUv5(LphDCr04uKH_#W#r)nNO zOj)`FCz20@;Pf2ZyZFBHcq!V9&F}9aXVKulE~8kb|1DBK0u~9vyI7F`j~=p*b$s|1 zD>)2F4^iO!w3o(Oxe%^ye`QP^F(U9R)tWtoj+RH<*u*Ni!``})# zz&bAHN8yDr^9vF~?>~1ulH2lepV?sG`o^e?l%($jBDr;l7yT!aq&3*Fs*+gUE45~o zh5uGk1S`$Vl3-8B_vHH2WePZb-IwGRtj0;w|HSS`S^V?AFMES8Zzp&#{!=Y#5^yPk zhi?e}llu@Hev8fedusSFTfK1M>Ow5?5i1(HMM-l_jjg^>Y>#vbt3+HdA6$sg$48DP z>ED{mh?5&o_Ls(sb3)gFH=fNCbb|M1BDp!1)P-#`9srfTMEnHy?-;_wz{l0O$ag?s 
zKD^DwPg{_0D)`pbpw;VB-;>A3u2UlDAM=BY8b+$|)|2ag*j%ClrbO<$DrQRz9a4Rl z#{chaPlz{lN$8LWb``O{0Nk z7Dp->U3>=tE(wYJV1A)ecZnsTm$ws|;Quy+l&^mQtM5rd3uRFK(~%T1SST(uj5R6j z5vQnw-KhTRCiu6xse8c6e*S8_P)W&bu(9;;FEsxiu73fPEi@uv<>FsSB1jGh7m67@WQ)1+dwe*NDX?EO1K69OG88rM`r5Wr4M?Oy5-@rpkv%w^ z*3WV}7ZPOxiIP|e$}c3EBLWghg<>uwQo%~3L~VT`Q7Z$`mjGI;i-(hd_^$DSW5694 z(btDGQluF5Xn?~)@HCdNJQ#AEyW!g-elxvn`^wdhhxD@GHx$!G;G%(f&`;d~$Cw-U zo$KPxf`A@Xoh|G*LB0^FN>>B*RR2*T+TLW|E>4< zIyei|kZI&?_=#z&x-cGaZ%A1PMifL+c44e>jKBc?{RWMGfrx$XaF=27n;(RXrTSqj z`B>c#h(*K>^GW(q;6bGbRnW@h`CXg2kYJS<$qE&dySNiGWc0jH>HP1%H#uMk$nSr& zUJ7iGnl&-(;&hPEj|&a@-<{6=+mN=&&-x25+NVMv0Y|4@9P$6^L+HPf!Hgk_>lY$m zeeBCSOL^Y^vkA%n>(IgoU}?6(V@v&9;Na@E{mU=DBM)wy@%uYs4{Dl&dystC|}e1W73OxR95Ue1zJUMq`5?xwMKDRx3 z%Sz@KQ!Mc5x8gEivTNt4*%zEyTOiTir$6Ua@HrCGZ$OZ8S}}LWG8Tm7 zq2x;0)Rx~prYW*6BL|XKas_~&)b6Ew!4SN@ zJm}S!0YdTz$*QYsHa=RO&6Oq~*drTw_Ov88wRQxs_N_2h-#%Q)`0u?g>)@E}W9-Xu zWTA@0lz;eT#EYN7Hy2Dw!|$-O_m^2{?bKzbMNM3~ zojFAylE3 zhK5X+lRN9`vmrbZ*0I}5i@VjfISVk0SAC(qHy)ztUa*35sF zK9H|t@hhC=_|XwLyuOWhzQ+uU#MTwN(vfKP0Uetf10SFb9SGuqkB2)8xmKf?fxMzL zjpw5#d1vo_1m83$wNJ0Jky@`@;XIvXlfYwKjD@ufP3?(MBhVX^yQAl-2(*kYpWPqn zy>-1w%iK{zDgo;}@c!Vi*^O!!-giFQFieCJbeZhgt?su7RprljWqb`k(uS*P0 z?|;(uY8+fkP{>Opsf1moI*fZJEkb$uzWr>=OV7~HF^z;0x(7h*SHhj&eXr(XwUYZ; zJl7VJYo?|{vSbh^;!-eDZetTN+S;*%gwE|_O+twQmhe6*in#+hTMXn4@9a+u4RVFZ z1d?gg32;{97({#DQQexl9DcU8liDG&ULp09+99YWbcDWsh1wWWr8IVYMh?8%3nnFa z`W0wYpw_hE4PLP2URh!KlP}AmleOVVyL@l&pVm74q;U~l@7FTm=v8!Y#}}%8*mIa$ zleORqj@Ugrb%7D=>uMk`UgRC<_ku_HsC5-x&DM{bW=95^lOC%@0ZDH2amUOw`q2zN zH?_p}hhB{M_~?jk7{m$KB#o3=m;?$|J&Grzha)5*d396>uo0nwUG)S>A-G{!T8w9s zII#WFw*)li>f1*`dGS0ZA;OM}Q*$OWVNwL$;M_#Z=%Lq5z-k}97k*FNvf{^fuijCe z%hw)(rc81ecN%mOTr2IZZmKr?{(5;0t)ZdeHb&s;7D_9|`FA!#rTTZGkX7LMd`F^3 zc-j+~-+u_B>?JU;@M%S^X1qvt=&W-3?ad@%t7l=uS^{ca^*{c!QC_ZWed!2)x(uvXR?OS{ZUJJq@yrKIBLz$! 
zmK)WE6r4Y&O~&wPpBxekx##9%MvfC*CT_EjWl;sRrzbwE00DtB@?|c$)|*Wn7!jOI zV}^~WpXaW($`V5$B9#boj_-i$C=Ps)Bjlnyow_X1h23rp%i5n;&~&2yJ%{<^doM*_fy+YWw7Q*Cfd;qu|?o{BZaH`*$CNBwm;4bvl4DJS~gHxP7w_=sn(z z`2-tlhXy=0-mzRizS@JDjU|6_{R*|7Zh}hk^hfNn!BqXTO(ZHoJ;p{D{xMsr_Sk!k z*0!Yy)Qw~aNo}LP&?unSn$d5K&h!0R=(dS6x$8+{@X}5g8p@6QrtSY zM`crYiouLKkJ{V6fm(){y7z2L&c50u`b?Y#szRkjPtmv4GO~2atwZdrToc`&8MPlb`tGmCh_*D_dT!sP3b;(J#B}9 zN|NZ)@v6~l`QA6GC=1mxmFFEDP;IB%wHf6y-dgKSOQ>^abI+Zl43%W@cwVy}sqh?f z^GI!KV(4U<>z+@X#armzt#r>c1vq6H#Lf1@5`-=ftQagpkWS;o z>(m`gm(4dSR|?#h(qpVY&Mw%wl~Hn5pKQ13=-Gb9CFr`g&;S%aYx4Z;z<}KP41ch~ z*$N@~4Pi!i8(2)5;61W%NnLpT*kAXicf@O)l`Yo8dls>i9vd}xs%tmfwkjhG_JA!F zh|6%z5}^u@KnU-vbh6-hT0AQkz8+dnf{;WI-6N*)%@Z|DWqO^?jMsDzKLPf#amoV5= zziI2I61yvqhkYmJg$aJ(a`o%-rHq8-UKNS5d^CooYvH~H@Ysn-?#s6~jD{=o=n<977<}rn zhs(-L9E5q*(UEj;!YAp2vN>)!_q|Np^*n$EqZrO{D>w&a#*FV1FChNM>v&BZ(C_?o z;5i=efFMyJP2Ovv`4{&HA2pb{AoEl#)N-(39A)4`)4Z}%e1aUW@Hsz==YHNm626Z= zUa@`&iAFv*%JBqPZJW~Qvm+dZL}wlyY)CY%+aKhwg1~fiP_-TWi#`47@etMME4&5jsjWAV_J!4F>>^` zVch5O76nfaz!h+Hj=s_1GN|?3>;C5Kv#x{ZVqooZ8Dl+u^br?-73t;kAzzF8^WVKe zgHgl7M@>mg5aXo}-?l;6Zk>@^*+w$6K*~0gwWoZT!qIY7!5f?B%!#XU8@(W28dw|F zWq93r6n*zeTQo=eTtRq-+^Pti-Y6xOJrvl7sCOx6sH7^~5XU2SwD5_@aC@dbz(9+! 
zFGB9ccpc|y2GfXW`dvPD%AUq!`;-E}<(?7>$B2Rr@xdc0NP^+xV%Agqs{ogIG>?B)}aE-`M;sqIY3$!X1Wg2dtlY|h?Wz1qzub=Dus zk*Nw~Sbhz?i#H8fZQio-g<85Hd+t7F-1_ctYc34ixTeRqlLw0vHB}kPiAR@FwPP7O z`g+7TaW?m}P2Akz|r}=xgGe7-#Uo~0rRNH1NDJ|c~9^4m+_U~wr zY(J(FcqSu7ZwBMGo~rl70SN~Jz@js4D@+Tzo_wUM%&I|2SKy=*9| zxr!MkX1&=2=GaL=d^-a^I4@j^2ja;btRYf36mn!?(EnSGIiznDBY3QRPHop=Beha%^&x z_TH+v3%KkL-F67cAlrL1Nx-R!@;sB4qrV1wd>Z0D*BWI$R$=G(vB}abcob&k8_%Kc zP_#N$YStUmKKS87mP&Up*H|D|tu~Wv&{r;R637mqg-?z}H1S|1-uqP2@?a&ybrr$0 zzca_<_K&D_YM9bWqO%j7M^J^eN6Sv(-Zu=mUOn~3$~t+Ne}%mx!tvE_ zFDA(7QqZ9-z>L$bPeh)<&G69A!>zZiK92wg?sB?nB2jX+;^i6tKv^C=wU%au#o<55#{>+%kO7~Y`GZ0Vo*Y|{^7|^r@)9+ z`8eYh7H^(?Yp>J2+)%jfmc!_0R0OL78JB(@zyv=CnNn#91g9V|uoF~b5m6oV?j64I z_5=Fwm+^Xd>7?$cM}qYnwof;F&X2v4y!MBMshxK`>kIU%%#U`LV&sqXZuM+w)fNK$ z9N@M4>a~Ue-%``=hwNGf)VIt$O9uj!i>ULcUO$L|zSY z%+1P*y>BxiTF8kMUP_DLHSfDsmEK{U>yZ7sa<}J^h@Sm>GZKkb;P-6SfCof%WYI$> z6ja}~uEW9-)I}&we^xv7#$^J~MD1i9-zCLd>mLK7M@D^0)o*$2D*z?>&LOyaRGEo&B zh@$cQ8l8CcAaI-;*E-L^c`uJwl`EZ(mX)!qIam9qH79%R$MH|N&P3|rP1LNZYk#w! z>ksd^GLf?yk3<&}8__F($Z^Zei2W{%98te?(13!d25>{T% z7aPp{2xf~_X-`O?wW}kw*5Z}B_9qn~CJQlxiaiymNSnqI^Zj9(dJTPD%+dGS(^#vR z3E7gNDw^}fxom#&cWY2Vn8~R3dHZ+`NHu8h8UK#`x|W(x_fVb&z@o}E%xE-efmX;s z=utic6rQGJa1GSWc;CIda__pW4CHy=aj}b5Mf7>h+Tj!H$|crB`_|j@mLtj2ISDqk zPuV`%zNVXEE;H{}2qfpi?2%wX7=OKne>9sQ7)nh|HQHJoF6poJ^5D9({C8{SRS8@8Dr;5~<_TG}69ZID!E$KV zgAWTEi*Gq$p7C#nx$ zVJNy{?!Rmy3Z~?lhoJ&kR(GVN0tm#0ViHL}ZN>E*dPM!rnLtOKxPS6m@P)F!n(6c=b`79cltRmD=c$+>pZC221TrB zy{s82tnhmv&^sfc|Zbw$!g2Y93yQBna&(Dr)C@B3$VeNzK z;Tf#6jshsPyp4wUZ}6OS+y!o0e)X*dwXI&}(=tc)SLM=BWH1rV6QG;YC=#9Lcwn8? 
zK*#M5H`x2qQ1KFEeqTx{(SsoIR|)UHo5;zF*CL?cp3+V5GR=A;m{Q1~#&YiUr|JFj zXnA2-dC7;E7|W~^VA%k=F+2o@2%y{@UKlwpTFHis1{6snY7Qnu93{$3JAI7JZcDl<#f4L3qXJ4oi22nG#TW8&?M%zAtwBa_( z!zBw}L&c9;0$+B--jlj%t?iX-rJB5u1JVi2Wa&kz+pCdZNs?%lhe5@%6))vD9ue+t zSWg3QMpoQ`EODoF#I5XB`m!VDf5$l#q*WYnCk@nk9j{oP*AdQvB9}&;_o?N`q<`)e zVo3aIJoRC$9gL{aW`O!y2;!lVA{5DeUAzu>@obqWI2G4o;pkGQbh){G$PKWR@TmU40Z9xEr~djMcwQp*2k^#(?-*ai&fj> z`FcTqvKsjlMc^6;+0x@xY|?;|*%t;SI43^Z+}DV`#p7T-k@ZWx7x{z}9rIFmeQ<-u ziEozt$QqY^5Hhm#+Xm+}w*v6*nF9=<7s5ex2IqTYbMWb@4 zQ{7O3e`b<=Se>i~O;ELqg%mY4*n>o_El#;G6kI zpzQn(@2)Wpv8^6tISfFB3pvNLG;k7^uH0%@D6HQ8aOoOVscLwl+s(af^nRZ9Ho(;a zlP^NWdam+3o>a?F>;`2kOQ%tJ`#CWJIQ`@OtVmK|IjU)oR@owK&gx!FS3+NHPSd$+ zc2%2G9{4HzHneY_Z%@c{Ud!RCTwydbZW42e9xB&Fq{x*+% zPFD;B%Ke=RVwQR{b1eo7_OGOswNDAoa>46AznoIoCB%z;NPiVZhd?*;?5Ox0^&a`p zI5#%MxQ+unm@odhQ{Q079i64n52_+xK)LJA9gm{nrNP4F39kdas;rcwcr@k7d#~{?Ul2DWM`6?)SDRP{zd{3MMBZd z`r~j+P>bJ|2&iBZVA|^U?AtiX7eS$kS3u7b!vN{hNebiJb^%Z0LAiw zYwc{%Gw8{?KNIpr2u@FsGjumj@>6F@cw*{vYb01_b#S)_R14}x+*k4wRk{F1%?4HS z36>?cSHd3spcdf0f0@qnaYo$VgivZB)yB8ij*D#xUI*F)6GK~se%C!(`UrFDuM$DN zPoCVn1Q*AH@Xvig@LvzGsos*kdMlOh5ag`pn^WJ7OF>2dm3Fe%N`ASs=z7IWEB8mm z-iExNLg8nTilLu9etm>m7Rb#Pi9g!oU>#SB+d(ZJdYoBoqcIKwp;+U1f6dBu*r)xP zc!#-Yg%0YOpwn_+zYe}%0;`*Q<2=X7eM6l7&|Z^DB%& ziWi`oP-d$n%g)d4z)64(S_Q~P{;&p}9wGnUf~?oazU|;)i!(cMHNdi7nf!@i*sT)= zJ($r@D!$f3ZJjdn^yWZvI;LN9xi^WSj?gqup?6S=R;^y(fE>(FV2z__bRNX@d8$sT zGg1uBNZqi|rF6^7dG6Ex|U|MM?sr z!$p7Aw);V8!u#!LIg4N<@O=Tz+sVo@GLC1v?5IKi)mI0`?eStImczwi?cs&uj*E|j zX~mA*P?IXP2am-Vp@-9{_G2K7+L8S59?*wriyoI-4sYKRAxW`&q4RRXT4*uFZ+G!a z=$TdzKSPbDdxR4I@C>NITpPYSyzDHx1*%aNiPm?`6pCvCKrfhxit^eY(3E=@!HK!a zIJI(B@)0V~(&l77p{jab5nDyCbX+p(J-2!|5hSoO9YDAK65cSk4PLOE;z#i8@z9!~ z092_<1aX?M;Z0JHdZQtE#--gSysRlDUd%Nc zjm~l+QANeY+X|=!+@O5pvX^u%pxYHL{iOmF9sV!|j?*JN$)J7zqew!7aDO$JO^l;u zSem?WCICG?r6mYmPK@~TL?=t74F0Iu{i+W(l8lfp?PH$`CZD-_ zLc^K^!Snp3R)GKbz$a5s`LvW67)n!2mp#k+qXx=RV+|L9U+DTVqZ(cRx2x-%A&cP 
zeY=65d|?o|@Bh?bI2&}r3Wt{6W7^mYt5YBNU5X177i;WEgdrtr^=RSWb~%$mk3s5n9P|GbL1R|Y>C_q|xZko>ou$8p+ z>Ya}F;YZQN3{jd(SJ3JYGt^>$l`wWF_g&#II?&~b!G+nj2(RSrOog*tYtPFJ^bS^o zv9A)mb+69;pWHTM#a67Q@L#7O;lnbQ4+*T66oKZ7Yki-LM1Nu)>Ehb(JN`Cd9`A^%cW zK9PoInY%;Y#^J&YP0vey(t51!fsiZm;qYep{-}+F7%X)+-rk$j*&0K#Zh*emdd=Dk!>{@;=Q- zVkc7{1h$+u9;s_8c4*SmP=pZueiC8wY>AeH%j1JtqIfi_x4IW{~CN$Sda@+Cnl|BAix zV|NDUwGoZpOqFw=aPPPza+U+#R7ie0;qxq!*&+#Ggs2R+CKW!t4n-&C2szBR5MB&#hzrIMrh|=+gUe~(-p8}J` zITC@=dU8m~p)+q|XvPw|>di3R^BsYtgE+7KmW+; zn+Iwn$H_BC-SsAkPSFLmN9JwE%bn*tpg%lewK`N}7UJ)Ij5#I)eRmR_6hn`5a*-9& z(~x^OQ)YjPm-*rJxp8|;1z?pgXANh^8+9uHyv?K$(2zG&Qvr&mVFy^_uZz9J!k04?hpqf7MV*)EKmcdtM=RlA=9XwAJh zzh4%CeTyZzXmN`u_q2hYj!iA6$-i~@ z;r`yTk~bGVbqK(fdAZAk+t1-@a($d3p`&D7&|IkDsynea@mlN8jS)ZDYz$zb`I8s% zomvV)%t+_I0Y3IRTuDF%@1+J*nv<2KEod=tH?tElWcnqT7t~>LZm?_e+B@x?1039! zuOk4`^n50>xP@dU1bPAQclxbCRG@KeV-tHP2!s300R zF4cuZmn07LQry5=a%|iGckDS59mLXCVq1I{QeN-++!*Yp;mNDg92I!(~MZF;K{G_E2T( zgm9h6VMLLnd)Kihh~P==dq8gkPdQRVgN8pgW?%ilhrFv-S$foy2G}MxjAr(-(+Er8sHPXm5N7fvqC{m{h&@#@Fi4SSZTdu?-aB$mI5SBJ|9NSnW##_rgYtH zZ+$@&pFD3fZM}a(lNPt$rqX5Otm;=p$q`d|>DISqGMzzb{1@^ zzJhjGjn(Nu{55__wtr^8uyk_iXU^ox3=wpVHTDV$)M%Jd3AF>PVOlUlYE3NQ4hAW7 z>^hm(-Ww^fP$-9HI!c8s*OMf)d`Xox4y50pj+7L@Kv>UNGQ2DO3VN}zxpFVhFvr7aRlrBjDSxw*?E~+ zuWTS0$HWjPnB2LfbMl2IS%?AE2H*tT_x$m#KO#mE4`u3^uG8Rh#>eASFc*t+X$9tDwLoKLO zFFX?1)T4~zC6fsg$Q^ZLpznf1s(DTr$t#}FSf#1{A3loVCWdNX-bwJD*Mqb*CB$cf zc=)jiWJ%n#aBAh@e8L_8q#z}raUd0h)64zZXHZ9=37@tWgd(A()DIs%5CdBuZnyu~ z@~qVKbNZv0kZ?!U5V#yPduv$y%?D_c;nDCOF>F)GI#=IgjnAs?bbq*gm+B*jZpCA4 z=kPSp>a>Id?RFcm>R*mT0E#t$yI%m@twpW=_9GBBJq@7q7W`sXKsm5E-M9yOSnX%` zY&z)x5lps1;=%kH&Y}u%DYxNM38ICAX{ZP;Y`Mg^z1W9VIa4{2*QDbU4zpNN=<_qH zNU&j+u|2?1E-<+@dd2|N{wnk$&=Lm-rZRUYU>+*&=QV>DCT4{*sx5(6H4Wg7z%q}!pq?*%#>zqk7EIV%jmv~ksm zM#OPZpH``>2AaZm*Dd7{lDzMKDo6t#Pwd;Ihyp+PiuAfcWx{|<&wTRZFgwUUyMM>= z(1q_duF8<#Vnj&NE2=Ae0PUvRJ-cc_p!>;sLx-XqTNBkhKRe~&Gg|EjeF^{OKK-6C zMO<=G3WTJsW}hNpAJfBH9G|cOJ+AOwIAgQbdOylrnLH^?JqlGRpp^PZ=N?(^l$ 
zTxE?L)NwoG{t;JWKKd)|YXNt|J}t(Z`7aSvcF(#WKw%ebast4%==By(iecB8V7^za zk-aRZ&A2|Q+b~$zb=OJ`We8T^A`gg=Dg0guY=xeHp}=d}mBQ;l@^o^e?z|QhmL8Rq zhorswNV(rh1W$?ll)gQhMk;q|;RkJUnLOR5fISvXVDi{oc2d>sKu}ziLTl5^Z-mVN zGqBs%yGjF_GSb|h0e;rCpgdw%7CZ;8LPEXA4@F>g*+-BO3a5s4`{}{1{oZ8}F@c}s zwkEouOsU5S+y%wfPaF51G}Ll$W~kH4LjIgT8{JE^fQ^~?Q()l(YRtBn95$uXQ8EYW zJ?ozOlOQjj#wWYGDLey+w*5`-Np{?KH%7hz{4N$>vLFhl_Ym5<4en@s*qqVKKObii zU?UZw{Aml&S^=$#oQd#auU`+oUIoa(E>-g(xNbb(j09}WdXF$DXC_2MxqQMsH>Po6 z#G4-+qndL?DJzy$LH`LmcHc5$Dg4X$0%|lr>cY6sj(LqH;IG2 z3+0G@1PIQzU{p$sphI9vF-LQHv?pQ?7>3v;tnBpSWL$Wq;H>5d7~SaUfKCUobZE&m z6a?$yK22n)Ssjgo{A|M%^? zd8V!f=n*cDl;yg0EA;${9JkB%K%gHD*OO~5+`pvC0=R*1&j$!-gsDveVP3bY)-VbB zo?FcVEQe*b4Biz;^k2$eDJ1+i^ad@{-vTnr*G&0GfZ)^jMJ{xT<8@zY$swcn6<&)0 zMc}W8^m-f~=(aqeK}d##uSsPQ`|NrlDd}jy2_ch$G#Ud4Z5&Vc1~1!ueAqr}Q>O>A zz`Q-`jS`lz_nEiDZjG?O>!TE>$4qaJ17y7n~?%BrMvm%6FnduBdOMS&G$dWIQP}NOKpg% z{d9JOq+Z;%gY3^3JFU!QB($Nn_z+_8aVLhRdtA;B0EbN~*`uRfYeitf^@}&4=5NekLJzlT`-5s;7b0oXFS%mdY(9*w_q>ysHdM(BsSINH2JX|M8v=-b-@8 zxLps%HJXhqlkH<~8}I<0bX0^Wt{>E*hIt=F3H?C-pCo21(Ro|lxa7j+ia>h^{fdx7qD5aNXcL3UpiGcmd{U5G|CG%D?N9n(OUdW0GzfSHp)aV7 z=>rzxBd2=j)S30F(P8DWU*XBbQ<`Z=sC@gQfQ>m4!uMY8%XWJqSuMoam9JB7^F;>4 zw5^c0A0S)adqQ@((ICc-y}`qB{Bf|1Zt3vXq8ko(@D2L`p3ea3TlHNg-nZybgueIT zt2IEfuZC~lbEDj%goS;ScBhh!%JiP$N2D^>I|wSo4_#42`;t@KxbcF%i~mwW>>IQV zV)e?aD~RiC0OL^$(LAI_!K;%L!ksmbK3MfN84I${Mg#*3x@+T(oqT}6$Os@HO-WPP z!Y{^nF4|?+E+#e*AU`KDtN)*2uwtO0zqT_3E(S?Rz(?gY7~93L#V;&qYD<_r+F6K`}J;k<$b)&C7Yn1K8Eq;xIhV!q`Ma0*x_aWO&B0oD*qprXDIuL(Q1 zK&NvTJ1Jp{MmJi-P&>E#HZ`xq;q=Z;j*zb>(bUtVP*IZf4Mgp68X&iHvYcIvRVZAz z9jtU|QfRkm#|o1YErcZhj)FQ7b{+-tb;s%ky*2UFA_Q zO&dCb2^8!X{+IbKu%sj~M2i2GfgxwZKn$EuE6Lz6u8;jUh!qa>NTETN>Efs+!MffH zgc}#zWFda12yyr8UH72^Z@NZ2HQhtRa7N%8j+ADF6qXZZ{|N{Le+UR-SDc0Y|6Nzw zBk+;a$Nj%|{QvY@$tJ`QI~|~3J)l1ZQVw_CBfgm3`!~~Yal8Kyb8jz`pu(R#$wqK6 zH}$`sHaM?Zpxb|w9-MR%z%uR>N3;dWHW0OI8WBUot!&-k_ph$o?mg^Kv=;>>QyTCDOcJ%w0h8N~abUcdz(K<5fqMM+jsGq$0LAw5-&H}Nql3NPdJlM> z_?Ec;Z3M6-n6Tc(ne+gSkQc$(y_h|e_XRhPyXc7bZ?a520eoD$+9h!z3oh`Tq$#~D 
z7ay&_$5I~2^Z(fz*r_U7IR^VNoULB-`)pj*I1{{n?PLnfmW5r0cY6yIvYUyaR!F<@ z!+5oN@fscxyvOOY22}F@-2U)Q_TAo0Ia2p)h z9q?9>?#|NLr8`fl6L?>?e6Bl)aV$=wLoKTZ7_>3ha4Zp{%H@w3anD`CcNG**C)k!Y zV=Ya3EU5?u4IWtET`B<_SXt0wf0|DLs8MCVSQCcGXAW3uh`3v=qfFHnivcr!q0wi@7=IT}cSZd?Rtv%ZCpaUqNMiHctk=8k5X zjl3gf8mL-K4g64PK+!e$=t!pi@7b-TPdRsv#q;%!_SbUXldz3k^%djO!Y!qgDKi6k z!KA>+l8Vo>wgD_1An$33In7bh-O+FW1noi3M$(2zDKDO2zhxG z&=PW1@E5h}j-m>x)`gGV%+&%O)P9u^0}VF*Z+n^v!v%U*=!n!776KefR!5L%M@0RD za9RxIJAQy9O1jlL5UYQ8H;~>$CabzW~acTJ2`kUSI(A6@%_U-SN=}ZGTFkcDNoOYiG3bYjjL-KM^kQr!XCh_a(7!(y9fJdsC%vOqd zXlej<-&GvMR3xM=fEpG?763KN;hJGWDi6KKPoEx!r|GrvYuACNexn;>LEHB8p3-O_Ah7sRcKMa`#xM2o2i1oX@s(jmsAhG(z;aQQVJ(`5Mf!lwY)&LgoXHdYvP zTk7;6jl5KjGb#~h%S8TYE`x&U#(>euh!fANQM=M%E?+Q2C12}TL{VCAQI>G07eL&@ zjS}c94ZV-YUcZ(Vrsx*IvCMg>&xsTuwi)eBT0O|%ptS<0)P3ZyrSOGim?Ef(7m#0m zMsZ*W)H*~qL_Pe!Q=z%Lu6x(}JGKum@25X@>y{zsD{g*h5o1yCO&0hR<1nt!R)-Fu z%s-(%rA}zRww8}fe3nDpYO`fAx7hre&0t+b@LELySqLJ>!~R`>(I?N@lLMo$(~6*o zrdjX*1_h>@go-UB>nuG@XInmE^c=ptb!9$rImfMk#pJkU^GOdO_#Ck1m$e3!V;kT< zQg4?JqdZ=H{lN+8tY(2scF5wSu?nE2b|cx-Vz%Q>D5*Je#e&33$0n)t znJUQzfMAvq?GI9?MUe1$tM=47#nvhH6g7LV<=;ECF*sv|_uN9FUuGxMFU^BNqda{n z)$wD%_|0ifvC=p`T8S4qvs@kGsszG6Dn6VNSv^tOH9QxwdB7&j-0?E8T) zkPTFE5R}QJI^8(FewM&2^73UmvxmUr8gg!5K+A*dyEuuU2dMr$6ZTW%s}$TR<12-u zl+=8IH4b3L?C0uY`*QoNOluI0^U&uuK*qB;{Y82R3M7^)$9wp69X+CV3vXbHr_XhK zFrDJZmM60j8I*PG7|rLsN&xxA{$8g!4d5ZdQ$ZLPnt=0Z#R24 z7usoeaLR<#tgz9VA()C!fh|d!XAXqDE-~y5hH_!bpLk4^NfKV&)vN`x8Ou1n z;^);zPPL~EU(I|*Cl*>QQe6>}rG>M9+EQLb6GQ22FLgWyML|vFhyRR{h4vlKkK_qS zWgM>u$w&zH(HkIhu8EnL2tjg=mlf-P@fTT&q55w?xXK=;BMl`$o%l_0VOAMaE@Nn+*w9$IJ(#qIm z=`sgDlUompZ3(z1-5QY}%10&sE&I*Gn-7P|eDq=tEULa+p`FL}uqEUQ?)On<% zddemB9FS5you-!%FmBe+((7MbEm?Co)VrOB^VLUtN4=g-!M#4jog;7H z6!RV6qkN`N(H?}~8be-m*=;~gzrVRgpDaP@K~o9_BFzDf1(1h|r&^ub69gpMov-8_ z=G0@nu^a%1mAceB$3)q>NJyO7K1cb0gW&6*wMb{ZXLt6ZydcFW#pUR+o4YW}u`)r(A9CnxQj{d;Q~4H*g6=7pCmF~2zP zmCzvmG4rKHWWjKuX4=tucv6wq2$)sK-CEe`lrYfJwezoeQR1hcclY#{ig(Z0M05Vr z0m1r4Yuw5@&zM#W8oG7!x!`T`ZIh#QOUt+7 
z(JL7$>f^_!HzT|mcx|8B#^3V_+7-Yi>%j-3TQ#FY-GdnMCgWg*+xsU)D}@W4$r7Kx z&Zm*`JQxLat8j%BT6l_Qi}Sl5yMRKLx8`*){G@O4M#3`J>Y$iz_}ZYi^+dH>@IdWy zKgMSqu*vvOjgP<0RDa+xEk&YJ(i93^q(I}_$GX98O=M{e(FmM+&_~{v96O z9e8SzsNqE}JG2dkTJDEH%vHfiHOhAnj%*A?XQdvL` znGM@>km&2*^ZWBUB2E>bb|#7(7+?+zdW4!M4#;RfgbvrxbXTZhZeahs$W7j_i(eZF zEWTk(OV`l24<4^Fw=8oN7`}KE4#BJYI05sh8`xgaaR2%n2sI&=CGht_qTx+Udz6Z|J61t z;I`Xw{{m>IHYNDetcH=@#7`6%Ra|#z_8u>{w)%-_n^-EWk!cezY!AdkH&`&|+L%-h zimGx?Xb)gQAo*yQq6y3CtQzI9QGF&XwCg92r19GAF6)TbF0p$c{i7zDImr**W3rxp z8G!Ac{pJvn^`YT{CEb#|Ekg{xHx(!uy7QfRw=e_@MSm)rJdj<`FpCC%*u)NhWZ4+S zMtaOx?o-k~&U$t5tg4+fu71o}N5n73`Sx$-VawjL2Q=?L)57U5Wm&|PE?|^A2rUff zYV*a4#Zt)==)YR|HHUB-SRI3s*?@6^m9faj92?RP+29XCz;!-6MM&Q6`%wO^p`#Du zcY8oQzg-%wv_`gWdyd(o3OtuZ3EvjhYQ?<4nn~iN)&HZDYmbLI>%+fcXhuU5N^Ujd zRuoD^u0un)cTusJk;UdRQCT}B8M?TYos{Szgjg{`6p5)=)NTsHn8__xWrsv;F4ca= zOz->0?jP^x_22l+@0|1dp6@wx&htFqC)e^Qob1PqkKZjFOlG|TqSgF$qNa-r&-l{V z2e&*pLKISBl)Z9QuYIxJ*S^5%?J6felGBt@)zt4m7`&K4r?G3u!{Mfy0S3YkAWy|!46U<5mmOnB(2ZV%*NodeEo!Z@I zPCVSW_Pcj)HzZUjW+RD*ZfCtuK?>zmj1T5zLjjHXo9jo7qi4dR&vJijyxH^W2vg91wOnod~sz8n(Uh*lm zqEsVc#kDVn<W@7|K~39eJYZ(rnoS6wn^dU{N!qEa&~h)pr+nbX{JwV_J+DRtqs z()lm_U*}`(9eE!fC{gIS>ULem=ABg=IP)?Cz~ghEq-`Q8|LPBvB}ZFUHk_^5M#$vg zy6q71H6QPmzdXvDe4A=@>nf}A%)-xgov(n}i>D5rg>m+=3qxzXd4~6@oCiO>m5yc= z6&0O)D=a0g8f6`xIc@c^!)#Z3G|KPZ242;xsCqlD|5`y$AT{kdhQEar6HSTbRsx0{ zwVzz>{|3c(_-TzH#K0Gg-|sAWpb^YM53MM+RiC&R$1Ye9QI=GgYO%=R+q+f4D)2W%_3lkNy7bhn;2F`H(h*4&)-~N>~rX@ij-Q2dX zTzA$!gK|7r;lX(1*d){u@h`ALSF8GF&u*!{H~?gxPxGzfZi6UJWHyMf4jj5C%X=Wu z0{4*BD8D~XThk|7*H*PzSK|n^{-%EFJ)VZJ>le=L`f1X%`q`0+d5sx<+w&88W!&R&9qwh;op2W($ zPc6F?uphldcP+21s+M9xtL_P!gMh4EDHo6RH!L=XsY0_y7q~IRpL#q)`s!+2aNIxq zLHqJ!(L#?wJTSXYptd#2mSSY zRAt(BC;rc}e7%Dy+xh|R8ugZ>jqNy)(87Hj-!b7l?wqh+A8`t&ggdW9okgc080-e} zkJsEPeNj!xH16lncq)nbip4sf+-pK{*PO4Y_$=VK%Qq*-^sMe>Aa{C|ezm#UJ~0wI z^)6G|yz-}+>X2A_Okr3-L$8Au^_Wk4K7Pxy(vaqktM^OGxSOgE7)0xK7gq84k#i{7 za~F=xozOfJdS&d@5^uGTuRX5q4pNVTo9B3MIQ43@=^S;NU@Cjrt&a=k%D8CtmD|g^ 
zi)Hn5&gW6XX&OkKt9t4p>d%Npt9)rz<|*h9;1*>*y(9>xw9U{5H^{31rXhDs6IH*2`vu%_f23$ z1POlgA}BPpvCZzfS&_d2IGSya+u6q1nciCfl#i(?T^ePG4H)ius~vs)ENKrs ziJZ>xEf{#%LEdqd7!DR0df0nr1h)%IdA!k|B7~zG5^^#derf%nu%S7(+33<||Fd-Z*k z7JZC}2OcrWcmOTNr@f%jfMXr1VAUp#YRK?OUvB0~#7jc|xSgBC5O+itJTr|OTm#R< zVXzxh$6x;=tz7uk9lITHSyD#I{bJEU_8&evU5nE93=#=#o+$NboaEU2|9BHUJlKjTj{o9Ds zbl6MZfXgDx&(1~dsd(xSGD$O+EF^Lsm&Gfz^eF{Sa6cC(ArtGVba7TDGDw<4 zM1!3uCjEd@1P7}?TXJOZADG^@+OD+=yq6)gmD)E>O{-wYk4p}!T zlh@bzi(_QAgQ1Kpxft&ZOYa8Q1FmoDaW{%WlhW?K0|Pr@^ao%uFf^0cvcgtb@_ufQ zIlo0HkZMuM5{QSFV5HdLA`Mka5vwB zTJ8V0}4`?`82YlO$uv z3*%PHvn3+|&PQ4^^pw@}>Ia(Lh3!*-Ag8ArTo)6KWN`7s%Wf|bCN4}*9Y`7`^nA^} zwKfghq4BA7cI0=tlDB@lR|gP zHv$EV#c&S8mc)M6IhEG??Czj3q`tER%es6yQlpmo4};W32R5hQs$A;a0SsN#XGE8} z#EF`ki%TtGEKdL2cjz}rui)iJ*Rp+NHPI4HLc&*B6uNH)rvLT$M6Z@~(UJy@(_+9Q zpoX74t2jtNt7$S;Mr|P8myBx{vbsYKw^v~%9?DC4xCnQ%BG^C}xx!UOP(m}gH9L+b z5|$%Gzg=Khxwx;afN3rXNHY^pGZEUbW8c&hB@dzPyOCLTe(`?;E$Eh7 literal 0 HcmV?d00001 diff --git a/frontend/public/robots.txt b/frontend/public/robots.txt new file mode 100644 index 0000000..14267e9 --- /dev/null +++ b/frontend/public/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Allow: / \ No newline at end of file diff --git a/frontend/public/site.webmanifest b/frontend/public/site.webmanifest new file mode 100644 index 0000000..45dc8a2 --- /dev/null +++ b/frontend/public/site.webmanifest @@ -0,0 +1 @@ +{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"} \ No newline at end of file diff --git a/frontend/src/App.vue b/frontend/src/App.vue new file mode 100644 index 0000000..4437b0a --- /dev/null +++ b/frontend/src/App.vue @@ -0,0 +1,3 @@ + diff --git a/frontend/src/__test__/basic.spec.ts b/frontend/src/__test__/basic.spec.ts new file mode 100644 index 0000000..2abc95f --- /dev/null +++ 
b/frontend/src/__test__/basic.spec.ts @@ -0,0 +1,7 @@ +import { describe, expect, it } from 'vitest' + +describe('tests', () => { + it('should works', () => { + expect(1 + 1).toEqual(2) + }) +}) diff --git a/frontend/src/assets/logo.png b/frontend/src/assets/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..e834886b8bd7062025a738e84615605f968670c4 GIT binary patch literal 36025 zcmeEucTkksw(p0is3?r%C>Rix7F2SUjG|}*0;1$BIU`XripZ!a+62kqAfjXmO^%LW zg9J%RMrm@AMsj* zojvSbjPG#UJG1@c375>AO`NP8U924JPoO6n-*Rwukvat{-P(ek(=bIjmX1r=<&JM0lCT5r2V3}-aH;&?R zPG-g~4o){69BlugDAj)~`Gf#JkHCpDnpXCv4(`rp|LF#^OU5o{Ql~&`{M`I}+2PZpY z*sztIv4t70qrJtc6Mu_QT+YGP!3kars}ua^pX24^RGl2mt!&{J&T3aKp1@z06X6#V z5#bi#;ioS5+BI>!y|at4y@?tAlGG^>4v&?UsrY#lb5T(fk@MUFqUXRblB2?+=Y3iAtc3z(Rl=N9D? z66H1)5V^%IBq}5%#4jKqCM59p(^Q2*w@vBzOpkt4}9O;JH?Z5tJYjuKZJmSVC$SI^w znILO0Gd=a!kFEX-dHi3!`LFxkEzRJe{|77Cdb_iOxr@88li39ekllZ4g}nc2@y^C> z|L3Lu->GtIP5*i6|Ap25zbySPv6xsI+gq4HgylVjHp7cz(Uwwp|95>+fA>#u>mQdx zFhqa-I|{=;{*L2j_V8#Yh|(8dUp|6i5A5-mF5J8mJGt%=rax4~VYzXCyZOQ-eL1me zXK3&+zMA!UT)Q1z5 zN;~`JuD8lW9oZiFd+;wP z$N$SX9aBi_@>bWOOjt@vD!Ci;zYxW5A`j1wI(wt^KFz4S?+UHU%Nf7T)gnqAr(yvO zCg)aj*n-Tu@r_lXfTr@i;Kg0dX2YMN*h;r0nyd$H{$R~9Z-{QO!}uE0E4Pgb<;8*pX%5N0uM6con#@O`eLeZ;r{V zrx42cE~dmfDa3n@zfCR80r9&?1CC?+yf<4}Vk&QKadV@ezTo}U{OwwFVUq!w?Q8T) ztqB`Kf)h zu=n}6bOKAd=)a@VGBSqTiSwY4IC4!%1SE1hllv*ZSqDtFtSMS-rZsjDSH>UULJnE) z@ZT+uZm`O%w)Uct3lbwglM^tj3rk(|CY+^>Wk`A{CiwMGVvDhDA48?qej^$_ooL>h zF*Z=0WBKp>amSuoLW-L@&``7nc)_qegOGXyDO~X?TzoB(qMeA}Bt=|2I zUGT&JsO(_0U#GJ8*sHsZNy8u)z6hkL*XGu)RXdo-;hF|#JL%c@JSgOacD{>THFctC zw5x`$nc&8yKU);lmn{kXhivKodamawlCZk*K@AIur2$Z{>D_nPhI2+0>p5euV9qR@y6@~D9vKe1Xvgm&iD`uCoc7Ya?k+YJ?_6NAbN^1W!vXlSApmLyp+!`m~3d;*qsgqCF57 zZwW5$PO?+~bT+Wd=+=rBfjg9J!n&%p@YN(FravwVXgliTn8 zEX-03SpAaGhz^Lli7M(w-j`VLL6iFjE_IJb9#EXGx}U4!>R7#-0~~BWpY*QEJ4o{y z(Qn(uMq)$5IW>`p&d>e{lHAx|DA28xWmN>?%v8>85Z7-*FYMh+tBI>4LuzG4IEXm6^|Kk= z^O#*)mOY*#i)3&LJtBa=dUv8-elY*V2ZKRW>9!N2JH|P&nm~B?Y_n2^?{dB2aM@xX 
zXD9u8m!F@n6hxGSjR!pKP`co^^q!T$<+$)XIaFmh-fQ}8T>%?tA{LnkV}0ma=DGCh zaXq^f ztE5*=A9EAWpf^%=fTEy1GT;}Sgwjx);tQUJgpHpmXhSH1+jyA16d6@&@M)d&YmYj_ zqZetB;6pr@pycHG&Kw&RM;ooLx6%%57kO|o=&R>MhM?kLJAt+Q=lf3leti96W%Rj>aV{{mGH@#lQ<0I@qc#9zC~J7$1L? zFp}UWO@D~%Ob^5R&qPmo{|`p7S}e@0lS**8oko^{emH$-k9sx>R{RW!wc{9@i)eLY z-blRHuwcTnlDd63SrCFCUa{HZ`$G;SR5R~i{9DkLMcvtAu`Z3zu{Tp0#cRsjt3rH# zCJ}u2_GXo@So0JP-0KKJmq~z;4cgr%#3LUNjB`;-_%sedr1>yI8bdIn+iPOG<`?tc zGv~2j4!H8PR$%)#l$AQ179rs6j~)yNVsZN_J97O7gn&c_cw(62WkNna$EfO_x^MQ4XBFvXP3^>7{DY2ro?(<;GEy&(CgNIb}bvf~@N`#F*FmHl_fo~_v>q1SC4Z9<=ap=@2>)d-@UDQvv> z7b6Q455XX24}y|&qx&mA8l(5!%5&sVP|hREqx1Of`{p#xceK}Ya-6%KFf62 z4P`6tsesFAGwO6TbP`R`!Zwt-gw4|VlOV0jkQdKb`4TSUDI2RZBV9IG+&v6(`7`CK z!#5IEZmRp9BBks+A2;pfW9I3FPgLE6TdchGjnmmA8N~6%yNpWzMzJxRIZ&Dh}LF7`Lg?gGrAMe8=&gCMAVPM-SD% zK(Z32e3XuAhm?_cTspkw!NxLD#M>L(brZP79%mJB9~t!_XZ$c^JuDiQ(-zp!Bk}cY zN^FZxyeb&X%jFRV@9ep}CyY8iJ7n!3KD?m^q1$%b6*Op8th9Fom3Q?QDcxy^syd+j z%*^e`U;x$@4pJ*Flp^`ybFAA_KgEb<*^2Jf?zWCefbjGEwK|>5#%ze)hQ;u`(sRI%U+&}*(VNrK})$#Lv^3xrAnyQkPj4LqVjRP zlbpI%pfHjBNtJN7Mz6Qu-uQc-Hq(CdtqD5!zu>zx40|JMnoO`a`@n)+O#~Mi7S?AL zw}PD6WV7T~;~+>HhUYrz71fowi0|d{@Yj4D} zH`OgXCg0WO*mmZm>P3+WVCR*dNTV@}t(55s{_4ioLk7n*QWM?0x}`21L0*BC$X1}+)SVAT zz&dhz-MYp+B6)of@Re^Ie!*5@@-22lKx zU7DjZS3BL{@aTA=-HzkJ77z!NktQ=EIF|K6)a}~JlJ8W^T@_x6Ftd7zGA{Hx zl%&tUtY4L2!p3DmUvU}1VCfyVYv)Pnw!uRDKH{Vl0vU<~mxX@6O-N5#b}@z`&2u@nE=2(k=C!nAl5u?=%yJJLXJ1xg$v;z(esu*Tb>&M$E%8l%5%fG|fd!8*jN2sn2Q6irN0 z;@s*QC1&)(LkW|Tm18au!-O`*!@ubz38Qi6;z5sY#4HxcIzTkC9Q~NMLIit+z;U8jgtT>XsdvEwiymRfM@@xPrq<-J7eh-&0mfY#Z zk1Xyz{7`@0_~ynO?B4Gqx`lTla2t~b$oQsO<&DMoO;t#c36(}Q%*OnpKPvoYPu zn98cDMi5xd_1mV%MVukr8AO?y0n{?mLJL1)a({gkP$(wSZJWE0J(#Fg#&@e>wu&u8 z(h20bAA+8XI9ibT1wtS`$8&yM&$&T3u={NYKKbZZ{{V*ZQ0+JX0rHb2?Nua>1Z0(e z{&D#+<&$93&7m83-y3K_htLz=6z#ETj1@b*YP3sP{H=<)8Ebl@P`(GBtF`npNd++V z1#0?pIYzxWN=M&!dB|gV=!N1!^@*wCiOf!`l(o<}+lhl`tMYa#kU zRF0_7UVL7$98*D}p*yBp$g*>=MUmHzJEI)f5~Q)4OdK9(@yC6Be&#YPFNN4s^Z5#E z9Ctxa&N+8{&JdPzgZGLiO49FbvdTKjRR)rwlEqv(z~w`QHH{CmO&+uRR$BlL_Dopw 
z6AL^C3gc^ec74A3gcSPxZB#BPF%+(o?cZ?Qf=PTXhFJ{0D>qsb;wr%~lyGR(om2Q} zMS^;ohFM>!hgpfSs2u1UA?cFazZDVnU z8nH8gzD?BX8c;^H!AR=dJAV-+ck_`6Wkt!k17o;GU-Uj52z}qdSwi_lmu~$!e?W z9N6PtvDyn--zmK`5U@shQDN&dO4(@K;jaui(&!J%%VaJ3Ca6C0jJSPpb~lUP+H5Cq4QWn`xEQEt&Ady*`J}%d zM$Et^Ao7gM|YkA`k|OiP<*gP>h-m#{I%=i^9s&(x^?k#@$+M^LU%&VYx5aKH@wJ2pYLNQvpI$a{SLbk6EG}`UA=nne<&|0-J)h+qez26&9JdgqbMv$`} zg6b&|Pcssk_lD8;cNhWVqYu zd~8F>x%eEYX0`6N(;(8~GZLs}p9+=unJ++K^|9gFEd*AX=+4K@e(uq)xKW*EU%vZ& zsmC0pL)Srls0{CGh=Rxgw3<_ps$O&B#8Pjv3JKTJbx;4MtsCci;KB3lj?TlMtdPSR zT|~9ge*G8ZcQ}vP4@|ACtv3o;M96QFr~$M-Mw=zpn?j6M-Lk2;(U3MeLk-6}7+*%KHEWQc!tfY3D| zuH{S-yUn}utTGs{P0Z-kC^?I+6Q3(vBl4^lu~nCd0^DiY!TUQ5buFE2Uy2lKJ9}qO zH9l+mbZ-YerJA1i=Fp&3kIzAjoeTW_KpbE^ zcJ7+_$YoW~bKNf!EC}ScDe-mRKRgDmQ1-P-LOd)c^IQQwrx!~RERHT2J<6J1VYv34 zDPsuYCmnVG)%*6%dV#K_(=ULHxnN1JQr_F|5nRj)al!rmD|6qun10i#u3_IPdP0vb zG7~0HVDSaP{FUVJquM9KO4%Sl37PfuvN>Bvw!C|XSBJV3x*sv8A!7>#Or6ZdDzX@Z)98L zB?P>6g=%*-f&Bxt*+s|d?Py~aZ#U_ag5YtPzK2=D4$TTYMFB+=J(Nrifr&)hWRg%^ zB$Q>-06obOHlFlYXl7|CZpPaJY5yP9PqQ!z#g8>xv@)LIioQSM+<88{Mfv1L!3kbHN0k`?ZkY-Q$}#hgRGm8X7v%kEZoftJbUHEcoIDj2zgKJ z)dXeWg*x>`R2@A>Z)gP_jU~T`m<-r=er0qE!;S8Qgv1el=Q|{p<{96`lG2Tp33U%Y zLU)}jEj9>NAzNVeTEimtwmeDtt3Nj!C4li3w%%5y;C{*QTz-{cDc z!SMyixMn*~LQf3Np=@5mzI3(*sf@^fan3Gj=Ii*tkN1qM140U(5X_wVJVx@RU9QWd zY^%Ia?Y0Ql?&>!9O-<0wOJKe5ChLK(Dro^GAyP!6qkGA9oU2@gEBb`mJ>Whc02q(T zkEqijSl$QK+`SCGXRMorAa_T?*+8AB-`(&z9)!u9dY(?Zg5Es^a(>D(>pweMz>)Ov zur*OnX_7-4C@hnC;HCls%?JvwJmata+`48!d}6J}rXsWMa){SJFMQ^-Sc;JZrW%Yy z+5R8O0>c2vZ?bKrWIS=oOX&$JDKdt_@fu9lJFrI(=9)7a6)8Fd$sd)p9-}XV>fGci zo=RkH)g$cD+@X`ZBM8B6sRU#QrY_!wEQIaqJ7VB?uix}(j+E1@0*&sPl) zwk(J4Qmf7$fYjE=1v_U5l94#X2iXJ7Qau&cs#|<_pC!GvuTE;nZgNn2)nfT#8HB?` z?OgNx2^| zgx_jGdAO^~`uvv>U;@+XKHwuz>48{_=BdUs2ANBJ6y0A! 
z{H$b6?s0CiL*zp9Jd#F<%;}>1bD4$TAJHf$zasKivxb4x&le#FmMR0QEgDai7Ck0e zP~l9Is0)^nXcnA36*1rTkg=6o2(|LFunFf7=hlVJ!SV6qIO3o{CiS zamhY_=z{s5!1N;MlU4VnHAjUNbAkRGCM0wtEn$7q0?;*9d#sfscg5Dy}pCX?_ zz)Lsf{W%CMJCvqkXRvhOru-|cpYD%`2#Vfi=Or2yaI`cjHGu8u`CAaU$dE7PAg)#1 z9Ad-Up9SWl4KM<>ks75F=iDf!UE*d(G8nzm-FFQt_as{3D?B0M<8QzWp3(CZCL#~V zxIra9G#NRT<<#|{bwwMiaQ4cv+m}$4pfSXQCvtyPPFb=hWDv+{hU?Y~pvs)yA17_T zWHtbKqRU#Gh_rMROf@kJ5NwxKO88^|jrZWueoO8r`|Y*nyxK(W;jeA^5^D?d$?aGN z#1vUW3wG^L929r%@jeTkNG#xf{haMr=$5a$HZPPVv3G5oOQV`81XXOsQw9=cm%WwB zHSZZOfL?5h^Pf)=m2*@Gw!}{JhuI1-Ha(c53 z?2;TcjOS+}$19hNq}J;z))Z~VruSpn$HDKkDuy3_oNJP)FlGHY_>pZ3iCv*vy6x)t ziN6+jqa8i4MUS=F+BE1|y+Ap_S`qAR;tN1Z@riV_CWOMhCf{8_D7?w3x2@xDLjWz~ zC#Lcrfsn3_GwHirxA8E0D~%;igQ~K9>qC3ua5f$3q;^e1ed5uspX~VZjrptsX-nzp z_tE{KM5*_09tt*1<{GYEMk%8gpb55a^K?J4?BlCKJYG64r8VSHo|;3$?1fds_9iGe zVxhuOAM(K;ikR4mjibr7b(_~3bHfewfXi;?Fm=G*LOuaL)DJJ=SdO_j)z&Z)>V1}) z3C$r8nU#E#LkPKplB(u|$#MYNlHJru9=bd<^rBn0=qa=tvpZS@k}9lyc^RNkfYf*# zrIX>txbBr^F;u*Pri|qNNmInng)Q(6`+Lt@9Ia8 zoA8ucnGOE>2yQ3LmKrelR04S6GXr_8$~GwURPID?cpn{8l(6MY zQquEpRr33dXLOT2-#(XiXYFyS=T=;}7y~)<6toT<)XT%k>4?|50^6y4AbcY&w~->A z`?P#M1A0*&MG-U5iCQUwQbMPmQxSUgaVU};C7vgh;Y|jIjyjaSbCk%lp||o?OAR?< z<;%ETYkscYa3d$b^i{BNVzC`dq70pv5iYiF!U-X=>p}dJK2D;O>+$A2J*PZ7Uvi#O3 zAaY0ngHuY)mU{`{O*==zaTOVlP?=}MnXc0Pre{%c{yJ?#*81;kPnkz0_e4s0x!HIa zycEF_2a()MH;h&I95|%7xaY#!wqBf#<98SysgyLxOD%i5*(*CXf_a(PB_J zm|Dt73ZO0%aE^af$qAATo>EE6NPZb~{2XYkOT*Z8klA?7MjDD|;Bh@aMGL3pmqN9H zH9rJf=M!pZ=;4L_29k~)O~>ctl{2Fs$18j3Ccv-^8-bQ-TWL;*Y7>)&k_(e-Rm$jj zlI{Ccx)_r3#kbOXAe9e*=^h)EtkLc+_t50m%<;vN&v}nLj^knkRlDz zo(PommOi7|YGuDQuT@(>RGup6mmcH(y3Z%}cFp08gBOio>uk>Qg<_05l9^+TxF7hWh-&9 z^&xC(fXc6oC)-t&Zdnp_H;di`-PBvtJw-coU-p)-PR9T(_73nnCI8-3@QW$JWF!t~ zq-zY~qlwj1&Eu1~P4?Wq>onK{DB7|2APbb@?%8Ea_pc3$>ekz9J#AY%!U$bJh&sc7 zN}GT@(FkY6X{!Q~AJu|z#vGrUJI!+oS^sgUz~QN>cK4?X)qH}JhNqaGFm@<*OM7UB zkJDn}abQLdvWlX-x{r(g<_5veltLz{d&m-04%u3y+l7(eq#{G1J0Cv#6Lm}w!Q=B$ 
zpA|ETM8fw5SOcULS0~-X=R;&AvKhW1Ybr5-p$?JOgtBIYGj*(7_UdwbIL@e7D^t!1pFXKu;+Ic>bBpSQ`nnoz$QENtN zxjqdI4;^@j-gm`Wa1mvDQ`WrYj)L-b9pzVM3JqC9;1?i%yGF=A9Zqh3BQ#h5utWZz zKSOGwMx^G~c6rkh$t%pX4ZNS0jC}O`*605*SaJ}#^=>&Zw4}iroC*i2RYfN5^5a8L zzSOyw10oVX|KxMY!+}>olB=^)$DL3RJ_D)0a?}bCjcbSY>D9y!WKw9iT>8Uj3*J(s zSGXu?A5UVx?`lr(Gn~iZ6H%x4`$Y4ke?C?#!zwQ!F!UL$x>06x4J`PzU0}2FCF%2(TZFh!DyxrDEih|a z_1Zzs=+*p+=j1=A%AzjNRo>MRI#DCZFASis!(9&Tqr8Z?QrofLhpYBq0Yor3N=3tV zeGFI^=10Y4f&DFc#^vsAYrz~y+a4YKlf(of{=Xontv(O&d6TfOTnu>j8CZI`JP2L{GKAK~{4WLo+z6&n?oW{UtEED{#qn-XNKQh)Gb=fXrvPL1 zM}DksD&tly#pecb7wqX=CmlD+A1wl}%?F`ztp#y-q{(l?dAKRTFdSGc0|;u7&BzMe z8s9JsH^jX7V(3?9#Cfwze#ZA^YKXBV4@)i-9D0sbJ2HmUt?p}KmCPv9f7K2rucQ`^ z{_%JZ@uC?#`Xej_&0#n>R=Mq*2*tnVAOY#?M1Fqd5Ez2kM0&1HJv~CI%wkX*1ZS9A zL%kx+AhY(PBj-Sbps<%D%;Ijm3hbrEuT2oYbdl;Auy>HuiC=$LPx~?I0++ZHGsswZEd2pOmQ`YoW5&5Nc zHJ`nShm8)ia?L;!ruZ zfgA#K5zJu~^!s{!b2~qp7?3mHp{qGGjd$bvs+?w$MhnY=7T8hfvfhS1xBo0u__kix zl}^^O`gz-E2jy92;kt38r!dx+8W2R!O~8~wV_H_ZPBJFi4j%bm#R|3HD_jg7>5eYyDliung%h zVAm2)RTN8vZ92%%P_g$J9a)dvg9Rgp4qJRYo|uxt;kPlDpt#U7-!;AF?m}o-5p&HG z4i$7~b*XIB_fpXS(qb^st1yEabKU6v)Q z1Azz&1+&7_+vI>bp3m~r&tX)9QJv*uAbU$~mMI-U`)q(VBPOiUho}fwG zi3G(c%I(+_*h8B=Om1n$Hqaff=Hgcf?+EFwq<=Fe*xQ{uJ6`) z8peZ6)a^sb{`|gmNvYr>+Ay!>YvV!1J%gwupZ^TjLr~-a>%oI>6m~dqi6(b(kowt4 z&|vs538k0G(#^f5v)@0UVb?TDC$56?3ye1-mzkfv`j3RxQ{+rv(Arybn>BqnKLko5 ze(xh9uk41BfB=89A8oq+L;GF+>Kp_O$ZV|3SJkIuOjr3m&p|Q%C(KNA=xmgN%Txv- zPQ?InmpvF4B6D}>e!G7_*^;~VR`3r&JcAP9iAdL({*{h`nt`*-!7!;I;83ydrrsAy z4gs^px{#s`G&qXN&jF6m&pudV9GvaZ013!#FHe(Ee#V{H8<16v){$Ig0FQsE*l-24 z8FXLm=f_L8G`)TA`^Y!`@$EbqeOa4!_gSzJ1-?ia!JL&MaZ(zd0s&$ez&&sGHKx@+ zIi#PJHk1@4Z#flLJ&%U)YYkQvLF*ZH`nB~ZadbcF} z`;AYv$Gx~9p=Q;Iw*GpY7Mn+vu%}Kuc?0JGB&G9I{+C1cDPBGh_Z!^0?KHy^RC4;y z=yz>s3O5t0XAw_<(E|FH_0iX3VP@l+@akVRun{|A)vAmykH)cXF4YR!b>&?z#KAaU z^=BC&?9ExEJk2>}OGJzSLe$TBk=a4KXf3oYeOMR`yIn6l_I|dcf1T&Au^7|qRc@F+ z4bnro#_6J>Ej@%!FElv#3IacHTYWl#OwU&OYPRZ#rEcdPAQ3VDeP~f-0Q86w$^4DV z=fB`j+l)-SD~4wIblZsf}E$$<21DO 
z5|1O3@>3*8Mhh9-X@a>eM~U8vNRy{l+(DtzQ!!*`e9rnpw*|}x9ezS&{v)?h9F_fu z6j?#kk%c8s86*nj?niIGZP#j0*R6%h_xDxfJ1Et6P@v$a#l}(X5%(>4d;n(g8empW zVWA}-6kXsw?VONSHx;VH^8NHAEDf@ItEb@o7lfXh5H^jT!P1%!8(ed`qK*dHdA{IZ ztZpn;^y@H~9qbUnGh8K*LODiISEGT*{EB!mm>M+qJ^_mqzap3)y%pNJc0nQwxPzen|^X*)ZvOAiG7KGX?Rr17yh#Bks;ZKPrj1jlz+cif3 zbiJuNh(FE)ye;2$i07kRnm6f0oOws6eoj{s)qcG}1z7R@cAMrr2n6!EP_L>;XFvd) z;Ny}{#H{)1-vaQLYwI31kw^W1-W+ z87h<)Q?C{CpJ2zsO`T*?L}Zf1mw-c+IK=&q2H{H^4;w-OYHaRBKv2BtGzwV)(sNNw zru{yP8FeA1?@jK*X!nMuSuASr4FQQqhqZ-LFFxE}rLL*3o)ePk2Fzzc1Z}dK1V`s- zJmQqiA($PlCu)VrtdFT^=a}Nv%VVgMqu6^?NHoq}B$N=y0gTd?hT7HVt#t9is{ZY3 z4&d#RTKVr`3ih0VHi}wHNRLsz`Yd@SX z{5|rL#x|u4sEa7yAM-{`CfoSH2#^E!mlw)*@#G)FySJs^b_W|HM_I(btxL(VpFf2) zYoiwwMJK6(K1_ar&DJ7BZwn$d={w+)zup~5jh?2|%Y3z25HE0gbyNqD=j_*_Z-%#q z+IG7=nysqNEMFP#po|1|VVFD|G{E8}h(aPRO@J)pchDdD%|1oK&L~*bU%mMQlVR*) zzhA{Urqdqf$U}Fb(2=VTvsr9hCV+3(3Em?3@}J4PW+QR#$D4>(DG1f(OBSGf@e$>V z+_>v3dLBdJv~jh}hfh>mA7=&HVz8ZIqI99lW^8QC%4bv)(XDL>NK@UR_y~3qz@Shu z=gI3*eE>URCZQrzFS@*OL=W)s8@u^-R}=xLm4vvVvrrwg@!^S1I-73XDhH|Ns6(s+ zG=)!h9f|5|*%z$T{bXNy`8IzBs>uI>4q0UjIwXVQ7Ec(%^7D3Vd7$z(4H}(808HN0 zeIfK9_Q~is*s0Mi%cobh1BEwWsaY@|BBEYo63lq0eZgTM2PrZMMWF+7a|c&1GwI(1 z>uNs~y(@cpgmJz+2s|Tju)(hXQ<;z1p}_&j;`M-H z^}-oBJd6L9#?t}pgixG3rmfz^HlpZ>b?401j7a1Y;YNrLHR0(4D}7Ohe0 z0T`4KzutP>!tIhw4SH)ngm9D|<)c6y>=Djsw1YM-Ieo_7je$%CUv=W!q4bv>LCF74 zCmVD8WaZE}d(Xoc;5F z`m#469!Lbt1m<9{1Fgvr1un7|6DtNIShiJeN2Sukw?$E_a3DrIhmqMCjk?=ddH;g3 zpFpZm;)D!<<1PECw$KRfT9xp+j08Iif*n0IjlZfR;+!DiZcman`j}bx2TT^i=rZ*r z$tcfUsAK|8MRN8Mf~?h3l>=N_*(X9IDOc4uPLpQfEE@HABj%DYmkQ;F%Ohe%+5&_N z9t0_;TaT3$LxV$MfhNEW5L`io#ZOcp@gsq(mOeAZ6HtFy3}}QpH`=3)F#j+-|HUw? zZZ%X=JN+=0xC`wq#Za9NYI2@VI$KqkdZ*e`9ZG)g)~_|-FsBDpUJ47_);%Eu&cUGk z%lVV;P$=^tQ=XoW8Lu>-ZUaMX=s&Hk3sq<$vSXL5Vfn9HDo->7%%V-58+Ji(&?T!o zw3L7YQe74CPK7L^6VTg0GdF3JGMwfMU{cP&cmmctRf=rf70i_K9mTRCw5t-BkN8W? zL1@r(OU<`Xinmuet^FK_W^19n1_M==h0t-KfWK=Fwc@9_0WhdWf;BFwyBY;QD+Uu? 
znm~`8ha8GQGWz-RCmGsR!Jc zD}Diw`w!iofCOV;vs~}1Op6u&j($b(z-zFRa?I*;sK@-8GYlZ%!F%$gT?~Mu_dfzs z#=_@NT^ZK0;65XYTzMQ3H8+8=L+9{F(xbOD^dT&6lPhiLLQ7Y3*r?-@r=n={%dU;axPBBE5ha3Uz|-A(ZeM|_ zF@sR%k`7)qsw&#!C}-m#kG!@QWsN+YP_jFm!8Gy1zV zg!3*`-73usOs1CQ&2ivFvL!Zz(igbLOuUUSat$omCFqRI3_4Lp4A(>)z6aLQvqj(SV_RZ0rZ102t( zLHG+CYZ8y z&CH`)ed)>vLYg&tkUD`A+!h7KMRgKB`4MZi{%f=gmMlf=HaM{#C@-en&H{ZULES+GU*A#<>1D3<~O@RAIS zhY#;m&WZum8^ys%Aw=9|)D;kVsC$+}`mGXA5=WFSwEXF8jT{F;5)d`Cx6kIyd{vk= zv4Yc`-J<|T8fEFI?`%KDRtnyZmWn`wt|AQ1`=6w~Z~hj+44(t_cq|ZxW+g8d5EHWH zgdQ*-hoy%g$vlN=md;#@7n1=D%_7owuA}hXh)nW9Qk2d-ga!Z?aI#K#vAchRNR{Yb zgg=-w`%(3gB7>;K|@c^gCC;@Kg>m8A+H1-Z34`AbLbIHL=k!}C^`gR+OlRK^o3nI zw0+oj!c-~t0JRB%|Ga@eu1tWLFkrVn!H|#ZEbvpWQ&LjqPp%rmdx4n4!`D;S~tsCXlqf& z$uar*PE;gKyMjxj$;;F_3xcE;O857msKt_!#pKUB#B+xftLxs`s!R@0g~J`f?r{o_ zMtt;O1|R6y()opjVK_l*MLlHAcMh49rS;B3HM>fhpqG{7Hr17?_l%rDT``Ky%-3MW z=m4BU+3AW=c0?p@d7m})R)~Ea=t!WKB%BG}ssRY7XU>d{9P@QWb2^|31rDdjcU3EV zJBab6Qg16^$2$R~WJXJxIn9*bIV!Qp}k z8yxr&E9Q)jwB8(&!^fk1$9DlzrvJMZy@%+evDy51I~+(4&I=5s!i37JNbBz?8U%^7 z>T~H2QEh!yMMY&oZsQpZ^>{E=qvm|M5!g8xG+?%*KRwmKxOFaoNg&b+o_{fXYZ@+$K|Nkh;J9`R>+-M<9)gb6%lqL{yw@N9nXi=Wpvxdcz$oiHt?o z9qHS|&Wi9Nlnmo^jj$H2LKgY=Flp06HNqMc+?g30)U}`{>IW2)0Ni}(Im4?tf1udH zgzg`#V*a+h<dda1@G<2Pyte zaUNpnMiAvr#f8ngB3gjnh(~Ky0Za3k8-^J#V@rA#US4pxW*_QZI?r6a*treMC80B| z3kPK$5hbo7hpR!<3A01PzorAx!jJ{%YbusWvU$uS@j^ z$Mho56kNa*%B4tvw$w1xBb?p)0$@U?Adi~qZliFf0H+JK2pQ)45FyCWb4{%SM1fA( zCekBw`E^URsJluX{T&S0vKI7p0W5j#HQK08hED!DfQA{2a4-$4L4yrvl8XoQSjS#@m=CW78;V4E z`JADGlVOoP=5LS8Phs`1FlXQa0}({(Ja~!@6k-@?P?ozV?3E}UU~B^Q5L8OZ*4#EsI>_EexAOo^NQ=%?p#Cze zcm+z_H7J6;dAwDsp_07g&~Yk1)D z)4Jbg-qX4qm06}KZkAblFGKEMFY)_Q{3|_X?Enw2=Je+oN|BL$5iMY*fWE*1AKL-W zTHvn_@qqHs{{Q!n;n{v?;JeU!yOt$c7kt?b1%3?cn0SLLf%XH5h?mZdcQ#OmzH51%d*gaJrJA{_dMbC9eX`Lh7U>d+~6D9zas88h5uIO7!= z!_jHdTf0dh&*HwYi-}fqCEobCu(0yu^L1A4VT<0DuZT%^FNCk9^cMf==1SP?KfiGR zGIPNAvJA`<8~f}7qSJ2uNC$hA;8R1nIN>Y1SnmwnHTH4n2P&&$CS!j!Msbd=<5*Rsxa)X!MH7sQ+pJ4hM3ZYzSG?hAS(__?*_ojA5qFqgXFhqX 
zT$=4Z9AxUt-q^dAo|@qoPJSokYcU}>Ees!v>4PI2g^SRmnbe0qLH0(^mrVWc)s=~? zMIK#Z`XUUPPxQhn=#KDu@sj=!((vb3kl9qw?XOtxJ5u^|?wiRg)`iMHLVKg+2g2av z9p+HMyf6L;-y;!#KC^k0X~p$1d&ho)SrhMewLUd%tTn{~N^xyCOymnweIpJV z!wzUT)EFi#@u8H@A-+O@y1X_9{nkRXi(RY`an=LlS7HPHv|TCyg39rYN^m00BIVtw zy@_tywOn#yS_>x>2xR!c&*6o@E(0ThpSEA zU)cMctUK6sLRJPOQ|w_C{Amd2COg*Tqj~5iU zpo#xUD40&KDzq#e?gmC9-{#BkV#THpIziO2S&Bd6?q4STMD*#CL)$Lz&K`9UO}SlL zhGWwWY+tx&^Choz*HPNl*Y6gqR^q*`c9jo(snDj2ysO1$tBaN2* zbf<*L>fB+Yyfu>}?nipKtk#rDfiQFWbN`jFDQhWcX@&(bh@U|pUTR5J?5?Hf;8<4F z@g3QD<^seKFCo9OqsFLs=JQjSNF&rji3v4w5^5ENI zTZMk>lK!gbkoY#sUyS(o!zs;_ROV^sZ=N+b7mit(tbaX6nx2Sf1Jh^%zuLJG?3?G4 zvB**6^{N51TV++m|6#ghMmm+rf&R~hhx`{4%QqbG(FNpvg2M_t*%p+aU<|u zv_wYO(g$w51_ud7o^s*K561ehrgpT95WFq^jPv=rA5WYb5u>yF@aSL$XLg^0iJX`J zuS5kCvoU$R6MS~SJn4!2qGm7*p(N$6b>nKEjvv5+pAHg^#nqHwN1pA}d?7B^(xo%U zY|6c$T=>bp9vV2i$A5iInv|yg4tjsa`ljf6E!;BCYaZSEO!OmUta>zFu<>%A>=Y8o zqdW6W0#D3(^!XTT`ftU$T$QTphAlZwV_IvH#}w@<45dTKn$XBx`UP#{2|nWH2gll7 zDN0cUtJc@T^}!9y-+mVro1U7o>yI(=J#w^*HtffIddX#Pg7=h-;n7eIE3cD<|Es+# zaffpK`?OFeayp$5;W!2@r0is=qhw^MQ??LgYpi9>I@&lDsTun=iBxvRl6^a9?8dRL zWst@?7{)O3exH8t@BJg*>$tA2uCDOh&pr2d|9n1oppc*U-VZOt-W|$QDSLL;QuC$@ z!bJElg@3(JbMIA&bg#hQK7=zcqYc5m(CW2^4QZ@+%0&8NFnu(cHp2x;q?dVb38PD0 zg7d!IwY*Ia5K40S<330^vYFS)I}tD!;%fjJGx)(u4h!l()B4jBrTH^)vij96XAgTlaVO$OyIZ^5tNj$u=}AzWI5^ zIJoVNgDn6n8nd*PAdR3|tH)V*rW|_wV;&oz)jZiPu$3kBPu{;WlkSRsu2||t!bHx} zI+(`5R&|IRNjq-)k@Gic^mw~X?d9~XLV%Cb$KjccTf}UH+*A!M!m0SRvc;)~g`3U3 zudd^&Ui(Z$!7M)VTbGMYrcui9R&{O7&)NWjdiB%}M}$7=(nZ@;);wypLaU#N>{Fbu#o>oc)T54IcC_mIvHh6TcN zLrBo`K<#EUim?jnC!US4Y?v2u3gPP%6eg!?YZgmSm5xxBcw(TaBCU3ks*JWNb+tf< zk>WP?{W%sxfWpfvs<3A2ZNzVR;;bzRcB6$v5-@}X(3Tl#dKTnJbqqXs!7tld z&n=dmv7!(FD-{b!zfHnPS`^>CW>Q0-&;~7iE)MH*%}SN$T|I^NW2ZLkk*K@#wp^aX zj<(OI=+tuX48u_zy%{1EY4elrmNsqofb}Uf4H)|V<^q_&@OhS^azXf?sfM0&f>s;S z7G;U@qyp3HQV2q|`@O-`M8wF$sMJP$P~OSrDR)Za9{qI&!L2vyFrVylp@WBji}th+P>Snwj~v^fH5bfMPr)-T!l zp^3jhG}$>F*;9cM6Dk*u{!f-P7<3(*j$-oO+p=9qIP`L;xLSvPM@ZOkS9F2L&4!({ 
zDqCcM2u&FIek46vo9iV)sJeg+vAjxuhT`Vi!gK0Q)L{Jao}h{(N`H#*0ks@-OPx1| z6y0@;gKEmq;& zh-&k`hef6OZMR+tduVLCiK~FTn3jW3An(ht36$Dzq%!iRHDj!l%`y_QywBrMB7o>E zp1rnnn#yXag3c*5U zfzEsQdZ*RM!14`OuKm#pvXRyAaXfA&bFDGKZY*Z|?Uy*7H_=*Ck99!P?e%3*j4T1LK4JH#BG;=( z_ddT#;`mUA5d2wIJ9lwdqasDpeY<6Efy~0U{o`im7xov#e-0oEmRzBw9jdx81WCVT zTUW-4_Ga2JWNF!7+iXO)Ou3LEN!F(8v+~*=!H+t4*F@Cy_eLEsa900j>}u)YgB42p zNo7f8+?Tq-SyC-t5R5y_wL{`apB+PGXWxOcW_NPMZ6sj}{7dNsX&f_uZQWkTkAKu> zhroO0KC%W7gl@?a>=gXsnB*1=)|rAFIVqSza#w$%Yy$r{bC(tEyfz`hs7p!qW$9#29>eUS0Hn_MToy?c?1 zXcAU#Y=F#{EI!@4O*AM%U}DX~Uobg(hB$-+@|g8Ez|=pKY%L`Kce`a(?iU~$rfB*P77hE!}eQLXsh zfOdZIY%vTaXVwH@G6i_~$V2W)X=8aifB6vbC`5=WYn`hNHxwKI)XCoWm4 zu0IfB9+3j{?-GgxIeGZnR}W3QCyToKBFZlJ%Kx+jFuKU^P1(P>EDV>FQ}Zf4>o~cK z3Gyc*E3L5#BnwQ{*?VROh|vdaC)?j>EbDLSHw(&k<3p zbqXEswfZJ49#lgJF%}3B1>sO3W!J)c_tfUrA%)%t>60{`rMzLD9{4z7kxoFc5Z*-t`I|5R5JrxNi)ClUMsW(U~X`o;f}&dIk2$i7b8c+X;=+qI0s$GG1d@X)ZHnVVf!=xooPwIt_VIpV zRv;k(zQ>3s&(O{z*7rbfQF!~+5OEL7{iP;C!X3LK-dFa1Qn39oDaN*&-ClQsM5Y~D z{c-QE#L-n8KcR8K_+&zJ689sSHxl~_zVT!AicVs3>TcEqFCnCSe5k8 zg`nYLRFV`4^idFo`N280I9bd7n>DGFEpclOTkrLuUJv--#nzXDJW<8t}wi{fWGE3 z61|gvGRvDSj=_w6_?^^^H7eUl-WgF`Y~Fh&^|Ya<%T3UWSX+x;xSN8;OF2MGp%ZGl zfwGe>`a;?Yvyu#tOG0}D_i*x$-EqL3>lL&nAtnG*eR1na2y^-_3>EajqmyeMQbU=9 zuCX@=FK4kb>3Wn4d2sT<^sCdm&IHup?U`AE&c|z0D?idVR&%_IBNSM(3q(U+xO-X( zr9VJxDZVYc05T~S023#GtNlwKOGg)7GI1?z;@_RTb=yt2qZUW;`(NXdg-BG+oCE}M zZ-`1?AeU-&m!ZnAsk_@%%Ystlh(o)kmg-S#O=`$2A_%zZ!Ct8C5My1f6-FuGtk}dTLZP z;{ZoQYA*THiKc>(OyIQZO5gENAbu=O+-Ir~~ zH-`tr&1`>Btz`*TTDC1OT-|vU>=t~{46Avm`TMCiIRw;D-br0mt<48Q;(g~Auh@C3 zBl;$gXe)dJ0d)PL`q8uewldo?kv~)WzCQ$On_XhFmMpPd*^yAVOCTzj%yIKdNk-fn zJ4_bIihA7gjA%h7P+OeG%ZQau$6C@)5LuOnM3X6pO$tF$h-Xf1xS3m}_$H@qPKREb z-r4|k1}|X0LZ6EOBm^Ze=Zk}{fRm8(9GNSJUf0f%wH=YrwGH(QiKQ&zx6e|QeQf}S zPY1%`&AevV0q4qWsyOgx^G+cW6+Lc?o%YAW2#F zdZxiqvRCoC_eKRDgn{Jd5NxB+f1NGy3*u}~o6Fs3LI#Z&v~6fAE4EU6^MQF2k`P;ny3u6VHLNF|eS zWeWQ}CKx~8-Bm7(v}^j0m}leu@n|uAlXNWgP{hTuy|gZ=a`&GJrScLF(r*J^q~o8D 
z9ZiKv&4S`r$BAeo13R0Vz#?}gQ`*39F3^}lQl0E(%q>2KA_72u9nb1Z(2GJ1tMrWN$gr~U0GEA)t+Y38U5cjK9MQfY za-z30fEhLakYFl%v-tRr6nFS;`b<_1?sh-$AbMCt@1=c(VF(p1LEXn$8M>#g04}rJ z;vD+}b}T?KGWcD;)PKFo1uRV}$rT4ZiF^wwwSsBwrIhC8`X7}E!2Nl*DyaU2eojKC z4sqxYszg@uNgdt#o(&c0`UyOb!@$?4Xx5)cbVMXZp-{DKGnP(JLrgDs2s7HT6^L;? zvJyhY=9oIf93=ioB0^+HJyYWsxz7Q*m~m#L$9>T7oLtD z#8h=d&(lV?->;LaWPKUHo?+gcp*?+0moS#ci!T^!uHSHB7~oEYFVDT`{bY9=mzE!G z-6hPc5DwqAvXZ1ozdm*<@+vizE?~Yl9b66iuZ&{Wc8t;P~?^b zrK$kwNR@XjQ+6i(WA{a*eh1&{Y=jHXy7NKq<*@E(*Y!!D{;6iTl&&w| z!U4H%pdl$#YLI&f%?0cb(d4##P_~z1TX7Y zo~IJ}XQsoRQ~H5{feO$!aZJ#yW6%7o&9WBqtJTLDq>gGFi!R-%(0Waogh=`(LKwe& zCQ?N&O5gKZKT+NVGAcC?zbxuAwRC`IwL8crt}ri)U^e1aTZ}AaD}wK!!z$gr+DSxIkqnw}v;mObav&IOK+q)Ww*B9X zgmOLI_kO52c%=^aD|QzpN!=~`Hh|vI?-`Ej2;_}gHeKClVws({2$vdCL_5f}vC zp9SKTTufd5QgVe}tr)HApMKGs^U2NnXF^wK<%+u)JmuvTG4Njno)^J zCmRSlT40^jr!PG6d z^;-Gyx-*br`GU+%#mTUWpWujER(O+bLPF~_@Osj&jb-LehCT_IUQBvy&= zne)!s!DS1Av_ad-^@y5d@^269;u+UQ4Jk#MY=KT!BE`~fS>qIb8{4lrz)q$#p)5+A zO_1^*aS9S4k4iQ7g_nwpBbTrAEWotVxWC#?367z-Tc{dz>tt?KsUrRpX-LRemdXGl zdDG3bpV+gaNe$R_e7a7N9?^=BYT#Uax_3^Zm!8=`M;jgQ-QRtwRDa6JPzkH47Fqpg zzMD=BtpWrXJpQ&>@WDOT#H2g(a09Ju|*s{V4pQ9T`s~FR!5V{ zb~^;0BgJLk`84&|hpbHADOA)&aW<3T6&*CAJaCjGv^y|KnjJ6okD)_id;;yjl?1~} z_SF$MyT$jIzp{#>kg^23{IZ5qsVgY6mm~PyL4!X=D2-!*Sw_m>pZjA9g(Ziy)5|`} zpRdft9V5gF-$oS0foWhvRZGe!l%DYDH4kRzUoH<1$5&lD0_gr`j`zE~4|4J=nre;3 z+8g4K{Y0W*y!PC1i3<^KUVs!V#h%4p7_)XATMuqoiP~L^*6qc-z1a7`x3C_CJ0F48 zyF<+5qu%f-e!*oo9CmT&`FEq?v5efLSHoz01-iF5+&Amf2i>Qc&I@}Vl1;oF+Wnmk z^uD5g9kfk)AlU7D-t-UA<)NktWoc=n-)4`^1{K=K%~@@)E^F5g4sNnp?2uaa>%CaJ zPzC1s#Ok+2(1u2cCA&q9u}jN4e&T^ERL-{%{hSMc|RmUWuW}t zX}2A~{;qJP9=}%g>o!b<)u#*I<2l#P*F9?X!)@QA+fLL3A7F(AZY;WK2c7M(Rt>#S ze!_4RU8EYz*ZDmjO~+##6FJvLDskI4aYxYd@U>b%n0fdsq8j151PvM{-2rfQ3s{fi z9gPYh=Q<4g*hYtmTEo+-?na)Z`Fcsf!^UcF&Ab%V3~0S=EWP_%{nt+=KUG(1lk9O( zk{#q4%|N=$H(_5Hv7jJF9*)Pfdd2(FBuwDt>Reu4UI%Y)Z+hBM9e=x{X%_i1&LRpP zr}aJCLc_?5>nJAfnCP-9e}gnAIrSyD@39JA?;9$#yPTPsDX*n9&N@%XV2G~QD4wO3 
zk%I^UzjrjxSN3Xj+^Z4$Q|=1!A#({;?mVLbLzyeFvQ)Ru1}BjEq&KqX=j^XsY#INQ zEx(yxE!NcG$RnE z#C0A%eApl0n_G%i_O7ln2%dWY%f4c>^=Du-GeJ?{u+4m9_nRC<&LP_@zY~D%1PED- za8AAx=$F!rKXB)`G+${2n&E7Wi9vFWjg3PEw;L_k4HZ6A`LkzVn`Xy6=H^R1;h3ur zpGHN=!p5yGVPcwThQ-SV&t)x3*EEvon=4a{@r@c4zj(?}s?n#55s{H4V&l>E+MFBr z_Q>!*HqD-%UM~%heRGOD`o&oi6Z!IGDSvBc$~jBo5`M<0KlAB!xa{q4`PKVc-rug8 z5wh)CUY20~ye`8;`@>v#ri`3r20_NG=_tgy`#*|@MsV}9wQ%D|}<>yB9$-}i+ zteM!-k5&z|7wA#`Ha3X-i6xSgo7ut4$6b5C8&}!YcA35~_pm)E2hai!^R&iuO|yNV zfG)+hFCF_<`)y%%6~iL*>XX%d=gkKQ;fU@@E>r|^g98o%`AsnDm@rQ5*B$7IB}jWm)sqn z4_I%TEKNAdfob}+9luMLLU9}po%T=RK3)FEHT%zYz9~_o>Ed$ zT*@zziKp#SuxPR}^E(WfsoyW^=;*v}Z&&xE=pj;>28MDJbp@TiiOY_=h77QIy#c82j7MRk-gZIMqZ z%dk88#R0AL3+D^Tuh8gU!W?y%dnu+}Gboqgf%aUVzfdds=&Yh*G%`G~w?|1xNR&fJ zoefz+@YEw#!7v#tpM2D7`ZGPJ&$$Gf?2q-lgM|{n zn47IK9zaY&Ln9foOccgZKSGi*qV@8Sd18nJsR>;ava^-vWRvm&iZgTPC-FIzNOB%ZB>#s?>$ux^d6b`t{qI$k-mjX-?6nNo4@B91ji?cbySTt?mvGAeuI(3g2V+2YGDj_+!{hxnsbV+O6|9a7PDYY{5 z+#zCqXnm5Uma2xvXm2m~$T7Xuivv%W^%9TXLK`J9)?~E#m-#19$=qyW8Slu%K z4c^nwhs?lprcCq_jA&+$kMh=igjbmG!)s^mcaD|#i;wnWR2S&g+0h{ zpo5`GKM$})ujR@5A)9~`B*MD{xS#UgF8+28vqVj+fSXz5Jc&n!nF-T-&m0%iJM4i= zE>-!*vj5QQTLv^+xyJ#Aqofl88{IulBhV8l9T!G#%Si0$96OArOR1?b51jK`Y~Na^ z5&?)m4=%!XH?H^AKxrOi~{TT1oc0KkQ+tkJQK$I#hH*(TSoUuSH#Xs05|eMu@Fop69Y#w)3* zCGZL>Qw|e0FQx|jE{f-0#%*6pl*Dno+XrhP8doJ6;Y7~)+6M$|B^#5SOVkOJB55~c zwLOBN*JJs7EB%>p>Cte=rau47+~X8qNyY|Po5$&@DUqem&5^>h2PJGdtIJQ3no+gU zGrH4l3$^925r{IL*IBBI4f7^KHApS{plX(D>T=r3ZLlCM`007z15*^xEK1eEpC)C& z-^|w}P`HWQWAjekg==tf4gccgn$yA1Ls|jFo&;Aefn1w^EMG|W?CWYJJSKG}ajepB z_U~thk^Pt%I)c_3Ui}>UZm}NVeEv2#Z3EBaZp0#-l;apA^oX=N9L3ne!opFQ!XRQb zmACkVmBFpFfhi&09N->|hCUZG>nw@cDs)=D;Kj*9j#VtoB(S7OyMwi6ll)N% zfxpswy51#-a~51pHKM7YL)1EMJ#p!Mk{j4Nf1!`9^n_HX*u&07YbmeR)C#Zs^dRY| z`h+m|ANgAa{{U1rt3tjbY+0B*-{pr%{8lXAvFAz5TzpB4R8Wcm%{fsx-&oRLk>p2~ zIc)#TYpm + defineEmits(['toggleSidebar']); + + const { availableLocales } = useI18n(); + + const preferedDark = usePreferredDark(); + const isDark = useStorage('isDark', preferedDark.value); + 
const body = ref(null); + + const toggleDarkMode = () => { + if (body.value) { + if (isDark.value) { + body.value.classList.remove('dark'); + } else { + body.value.classList.add('dark'); + } + } + isDark.value = !isDark.value; + }; + + onMounted(async () => { + await nextTick(); + + body.value = document.querySelector('body') as HTMLBodyElement; + if (body.value) { + if (isDark.value) body.value.classList.add('dark'); + } + }); + + + + + diff --git a/frontend/src/components/Form/TextField.vue b/frontend/src/components/Form/TextField.vue new file mode 100644 index 0000000..2fbd52c --- /dev/null +++ b/frontend/src/components/Form/TextField.vue @@ -0,0 +1,31 @@ + + + diff --git a/frontend/src/env.d.ts b/frontend/src/env.d.ts new file mode 100644 index 0000000..d27eb5a --- /dev/null +++ b/frontend/src/env.d.ts @@ -0,0 +1,8 @@ +/// + +declare module '*.vue' { + import { DefineComponent } from 'vue' + // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/ban-types + const component: DefineComponent<{}, {}, any> + export default component +} diff --git a/frontend/src/layouts/404.vue b/frontend/src/layouts/404.vue new file mode 100644 index 0000000..c96df8a --- /dev/null +++ b/frontend/src/layouts/404.vue @@ -0,0 +1,5 @@ + diff --git a/frontend/src/layouts/default.vue b/frontend/src/layouts/default.vue new file mode 100644 index 0000000..f71177c --- /dev/null +++ b/frontend/src/layouts/default.vue @@ -0,0 +1,18 @@ + + diff --git a/frontend/src/main.ts b/frontend/src/main.ts new file mode 100644 index 0000000..5171ac4 --- /dev/null +++ b/frontend/src/main.ts @@ -0,0 +1,19 @@ +import App from '@/App.vue'; +import { ViteSSG } from 'vite-ssg'; + +import '@/styles/index.css'; +import { ViteSetupModule } from './types/ViteSetupModule'; +import { extendedRoutes } from '@/router'; + +export const createApp = ViteSSG( + App, + { routes: extendedRoutes }, + async ctx => { + Object.values( + import.meta.glob<{ install: ViteSetupModule }>('./modules/*.ts', 
{ + eager: true, + }) + ).map(i => i.install?.(ctx)); + }, + {} +); diff --git a/frontend/src/modules/i18n.ts b/frontend/src/modules/i18n.ts new file mode 100644 index 0000000..ef4f61a --- /dev/null +++ b/frontend/src/modules/i18n.ts @@ -0,0 +1,29 @@ +import { ViteSetupModule } from '@/types/ViteSetupModule'; +import { createI18n } from 'vue-i18n'; + +// Import i18n resources +// https://vitejs.dev/guide/features.html#glob-import + +// Don't need this? Try vitesse-lite: https://github.com/antfu/vitesse-lite +const messages = Object.fromEntries( + Object.entries( + import.meta.glob<{ default: any }>('../../locales/*.{y(a)?ml,json}', { + eager: true, + }) + ).map(([key, value]) => { + const isYamlOrJson = key.endsWith('.yaml') || key.endsWith('.json'); + + return [key.slice(14, isYamlOrJson ? -5 : -4), value.default]; + }) +); + +export const install: ViteSetupModule = ({ app }) => { + const i18n = createI18n({ + legacy: false, + locale: 'en', + messages, + globalInjection: true, + }); + + app.use(i18n); +}; diff --git a/frontend/src/modules/pinia.ts b/frontend/src/modules/pinia.ts new file mode 100644 index 0000000..db34dde --- /dev/null +++ b/frontend/src/modules/pinia.ts @@ -0,0 +1,14 @@ +import { ViteSetupModule } from '@/types/ViteSetupModule'; +import { createPinia } from 'pinia'; + +// Setup Pinia +// https://pinia.esm.dev/ +export const install: ViteSetupModule = ({ isClient, initialState, app }) => { + const pinia = createPinia(); + app.use(pinia); + // Refer to + // https://github.com/antfu/vite-ssg/blob/main/README.md#state-serialization + // for other serialization strategies. 
+ if (isClient) pinia.state.value = initialState.pinia || {}; + else initialState.pinia = pinia.state.value; +}; diff --git a/frontend/src/modules/pwa.ts b/frontend/src/modules/pwa.ts new file mode 100644 index 0000000..6313341 --- /dev/null +++ b/frontend/src/modules/pwa.ts @@ -0,0 +1,10 @@ +import { ViteSetupModule } from '@/types/ViteSetupModule'; + +export const install: ViteSetupModule = ({ isClient, router }) => { + if (!isClient) return; + + router.isReady().then(async () => { + const { registerSW } = await import('virtual:pwa-register'); + registerSW({ immediate: true }); + }); +}; diff --git a/frontend/src/pages/[...all].vue b/frontend/src/pages/[...all].vue new file mode 100644 index 0000000..2d70677 --- /dev/null +++ b/frontend/src/pages/[...all].vue @@ -0,0 +1,19 @@ + + + + + +name : not-found +meta: + layout: 404 + diff --git a/frontend/src/pages/index.vue b/frontend/src/pages/index.vue new file mode 100644 index 0000000..0e7d151 --- /dev/null +++ b/frontend/src/pages/index.vue @@ -0,0 +1,157 @@ + + + + + +name: home + + + diff --git a/frontend/src/router.ts b/frontend/src/router.ts new file mode 100644 index 0000000..657ff61 --- /dev/null +++ b/frontend/src/router.ts @@ -0,0 +1,17 @@ +import { + createRouter, + createWebHistory, + createMemoryHistory, +} from '@vue-router'; + +import { setupLayouts } from 'virtual:generated-layouts'; +export let extendedRoutes: any = null; +export const router = createRouter({ + history: import.meta.env.SSR ? 
createMemoryHistory() : createWebHistory(), + // You don't need to pass the routes anymore, + // the plugin writes it for you 🤖 + extendRoutes: routes => { + extendedRoutes = routes; + return setupLayouts(routes); + }, +}); diff --git a/frontend/src/store/index.ts b/frontend/src/store/index.ts new file mode 100644 index 0000000..43ba769 --- /dev/null +++ b/frontend/src/store/index.ts @@ -0,0 +1,7 @@ +import { defineStore } from 'pinia'; + +export const useStore = defineStore('store', { + state: () => ({ + count: 0, + }), +}); diff --git a/frontend/src/styles/index.css b/frontend/src/styles/index.css new file mode 100644 index 0000000..b5c61c9 --- /dev/null +++ b/frontend/src/styles/index.css @@ -0,0 +1,3 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; diff --git a/frontend/src/types/ViteSetupModule.ts b/frontend/src/types/ViteSetupModule.ts new file mode 100644 index 0000000..db6062a --- /dev/null +++ b/frontend/src/types/ViteSetupModule.ts @@ -0,0 +1,3 @@ +import { ViteSSGContext } from 'vite-ssg'; + +export type ViteSetupModule = (ctx: ViteSSGContext) => void; diff --git a/frontend/tailwind.config.js b/frontend/tailwind.config.js new file mode 100644 index 0000000..59d9b6e --- /dev/null +++ b/frontend/tailwind.config.js @@ -0,0 +1,16 @@ +module.exports = { + content: ['./index.html', './src/**/*.{vue,js,ts,jsx,tsx}'], + darkMode: 'class', // or 'media' or 'class' + theme: { + extend: {}, + }, + variants: { + extend: {}, + }, + plugins: [ + require('@tailwindcss/forms'), + require('@tailwindcss/aspect-ratio'), + require('@tailwindcss/typography'), + require('daisyui'), + ], +}; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..b6c0a56 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,34 @@ +{ + "compilerOptions": { + "target": "esnext", + "useDefineForClassFields": true, + "module": "esnext", + "moduleResolution": "node", + "strict": true, + "jsx": "preserve", + "sourceMap": true, + 
"resolveJsonModule": true, + "esModuleInterop": true, + "lib": ["esnext", "dom"], + "baseUrl": ".", + "paths": { + "@/*": ["src/*"] + }, + "types": [ + "vite/client", + "vite-plugin-vue-layouts/client", + "unplugin-icons/types/vue", + "vite-plugin-pwa/client", + "@intlify/vite-plugin-vue-i18n/client" + ] + }, + "include": [ + "src/**/*.ts", + "src/**/*.d.ts", + "src/**/*.tsx", + "src/**/*.vue", + "components.d.ts", + "auto-imports.d.ts", + "typed-router.d.ts" + ] +} diff --git a/frontend/typed-router.d.ts b/frontend/typed-router.d.ts new file mode 100644 index 0000000..38d7314 --- /dev/null +++ b/frontend/typed-router.d.ts @@ -0,0 +1,95 @@ + +// Generated by unplugin-vue-router. ‼️ DO NOT MODIFY THIS FILE ‼️ +// It's recommended to commit this file. +// Make sure to add this file to your tsconfig.json file as an "includes" or "files" entry. + +/// + +import type { + // type safe route locations + RouteLocationTypedList, + RouteLocationResolvedTypedList, + RouteLocationNormalizedTypedList, + RouteLocationNormalizedLoadedTypedList, + + // helper types + // route definitions + RouteRecordInfo, + ParamValue, + ParamValueOneOrMore, + ParamValueZeroOrMore, + ParamValueZeroOrOne, + + // vue-router extensions + _RouterTyped, + RouterLinkTyped, + NavigationGuard, + UseLinkFnTyped, +} from 'unplugin-vue-router' + +declare module '@vue-router/routes' { + export interface RouteNamedMap { + 'home': RouteRecordInfo<'home', '/', Record, Record>, + 'not-found': RouteRecordInfo<'not-found', '/:all(.*)', { all: ParamValue }, { all: ParamValue }>, + } +} + +declare module '@vue-router' { + import type { RouteNamedMap } from '@vue-router/routes' + + export type RouterTyped = _RouterTyped + + /** + * Type safe version of `RouteLocationNormalized` (the type of `to` and `from` in navigation guards). + * Allows passing the name of the route to be passed as a generic. 
+ */ + export type RouteLocationNormalized = RouteLocationNormalizedTypedList[Name] + + /** + * Type safe version of `RouteLocationNormalizedLoaded` (the return type of `useRoute()`). + * Allows passing the name of the route to be passed as a generic. + */ + export type RouteLocationNormalizedLoaded = RouteLocationNormalizedLoadedTypedList[Name] + + /** + * Type safe version of `RouteLocationResolved` (the returned route of `router.resolve()`). + * Allows passing the name of the route to be passed as a generic. + */ + export type RouteLocationResolved = RouteLocationResolvedTypedList[Name] + + /** + * Type safe version of `RouteLocation` . Allows passing the name of the route to be passed as a generic. + */ + export type RouteLocation = RouteLocationTypedList[Name] + + /** + * Generate a type safe params for a route location. Requires the name of the route to be passed as a generic. + */ + export type RouteParams = RouteNamedMap[Name]['params'] + /** + * Generate a type safe raw params for a route location. Requires the name of the route to be passed as a generic. 
+ */ + export type RouteParamsRaw = RouteNamedMap[Name]['paramsRaw'] + + export function useRouter(): RouterTyped + export function useRoute(name?: Name): RouteLocationNormalizedLoadedTypedList[Name] + + export const useLink: UseLinkFnTyped + + export function onBeforeRouteLeave(guard: NavigationGuard): void + export function onBeforeRouteUpdate(guard: NavigationGuard): void +} + +declare module 'vue-router' { + import type { RouteNamedMap } from '@vue-router/routes' + + export interface TypesConfig { + beforeRouteUpdate: NavigationGuard + beforeRouteLeave: NavigationGuard + + $route: RouteLocationNormalizedLoadedTypedList[keyof RouteNamedMap] + $router: _RouterTyped + + RouterLink: RouterLinkTyped + } +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..32dbc4a --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,131 @@ +import { defineConfig } from 'vite'; +import vue from '@vitejs/plugin-vue'; +import { resolve } from 'path'; +import Components from 'unplugin-vue-components/vite'; +import AutoImport from 'unplugin-auto-import/vite'; +import Icons from 'unplugin-icons/vite'; +import IconsResolver from 'unplugin-icons/resolver'; +import Layouts from 'vite-plugin-vue-layouts'; +import { VitePWA } from 'vite-plugin-pwa'; +import VueI18n from '@intlify/vite-plugin-vue-i18n'; +import generateSitemap from 'vite-ssg-sitemap'; +import VueRouter from 'unplugin-vue-router/vite'; +import { VueRouterExports } from 'unplugin-vue-router'; +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [ + vue(), + VueRouter({ + dts: true, + routesFolder: 'src/pages', + }), + Components({ + dts: true, + resolvers: [ + IconsResolver({ + prefix: 'icon', + }), + ], + }), + Icons({ + compiler: 'vue3', + }), + AutoImport({ + dts: true, + // targets to transform + include: [ + /\.[tj]sx?$/, // .ts, .tsx, .js, .jsx + /\.vue\??/, // .vue + ], + + // global imports to register + imports: [ + // presets + 'vue', + { 
'@vue-router': VueRouterExports }, + 'vue-i18n', + '@vueuse/core', + '@vueuse/head', + // custom + ], + + // custom resolvers + // see https://github.com/antfu/unplugin-auto-import/pull/23/ + resolvers: [], + }), + Layouts(), + VitePWA({ + includeAssets: [ + 'favicon-16x16.png', + 'favicon-32x32.png', + 'favicon.ico', + 'robots.txt', + 'apple-touch-icon.png', + ], + manifest: { + name: 'Vitailse', + short_name: 'Vitailse', + description: 'Opinionated vite template with TailwindCSS', + theme_color: '#076AE0', + icons: [ + { + src: 'pwa-192x192.png', + sizes: '192x192', + type: 'image/png', + }, + { + src: 'pwa-512x512.png', + sizes: '512x512', + type: 'image/png', + }, + { + src: 'pwa-512x512.png', + sizes: '512x512', + type: 'image/png', + purpose: 'any maskable', + }, + ], + }, + }), + VueI18n({ + runtimeOnly: true, + compositionOnly: true, + include: [resolve(__dirname, 'locales/**')], + }), + ], + resolve: { + alias: { + '@': resolve(__dirname, './src'), + }, + }, + server: { + fs: { + strict: true, + }, + }, + optimizeDeps: { + include: ['vue', 'vue-router', '@vueuse/core', '@vueuse/head'], + }, + // @ts-ignore + ssgOptions: { + script: 'async', + formatting: 'minify', + format: 'cjs', + onFinished() { + generateSitemap(); + }, + mock: true + }, + // https://github.com/vitest-dev/vitest + test: { + include: ['src/__test__/**/*.test.ts', 'src/__test__/**/*.spec.ts'], + environment: 'jsdom', + deps: { + inline: ['@vue', '@vueuse', 'vue-demi'], + }, + }, + ssr: { + // TODO: workaround until they support native ESM + noExternal: ['workbox-window', /vue-i18n/], + }, +}); From c780a0d3acee2a0647c1d26056ddf593cb0a4f46 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 10:05:31 -0800 Subject: [PATCH 007/530] add ent to taskfile for swag generator --- Taskfile.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Taskfile.yml b/Taskfile.yml index b67fbe4..2bbb6f9 100644 --- a/Taskfile.yml +++ 
b/Taskfile.yml @@ -4,7 +4,7 @@ tasks: api: cmds: - cd backend/app/api/ && swag fmt - - cd backend/app/api/ && swag init --dir=./,../../internal,../../pkgs + - cd backend/app/api/ && swag init --dir=./,../../internal,../../pkgs,../../ent # - | # npx swagger-typescript-api \ # --path ./backend/app/api/docs/swagger.json \ From 9c1cced57651d6b0514824217ff2c73049031246 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 16:06:57 -0800 Subject: [PATCH 008/530] setup api client --- frontend/src/api/base/base-api.ts | 17 +++++ frontend/src/api/base/index.test.ts | 24 +++++++ frontend/src/api/base/index.ts | 2 + frontend/src/api/base/urls.ts | 31 +++++++++ frontend/src/api/public.ts | 39 +++++++++++ frontend/src/api/user.ts | 18 +++++ frontend/src/lib/requests/index.ts | 1 + frontend/src/lib/requests/requests.ts | 95 +++++++++++++++++++++++++++ 8 files changed, 227 insertions(+) create mode 100644 frontend/src/api/base/base-api.ts create mode 100644 frontend/src/api/base/index.test.ts create mode 100644 frontend/src/api/base/index.ts create mode 100644 frontend/src/api/base/urls.ts create mode 100644 frontend/src/api/public.ts create mode 100644 frontend/src/api/user.ts create mode 100644 frontend/src/lib/requests/index.ts create mode 100644 frontend/src/lib/requests/requests.ts diff --git a/frontend/src/api/base/base-api.ts b/frontend/src/api/base/base-api.ts new file mode 100644 index 0000000..785cb82 --- /dev/null +++ b/frontend/src/api/base/base-api.ts @@ -0,0 +1,17 @@ +import { Requests } from '../../lib/requests'; +// < +// TGetResult, +// TPostData, +// TPostResult, +// TPutData = TPostData, +// TPutResult = TPostResult, +// TDeleteResult = void +// > + +export class BaseAPI { + http: Requests; + + constructor(requests: Requests) { + this.http = requests; + } +} diff --git a/frontend/src/api/base/index.test.ts b/frontend/src/api/base/index.test.ts new file mode 100644 index 0000000..2f40e0c --- /dev/null +++ 
b/frontend/src/api/base/index.test.ts @@ -0,0 +1,24 @@ +import { describe, expect, it } from 'vitest'; +import { UrlBuilder } from '.'; + +describe('UrlBuilder', () => { + it('basic query parameter', () => { + const result = UrlBuilder('/test', { a: 'b' }); + expect(result).toBe('/api/v1/test?a=b'); + }); + + it('multiple query parameters', () => { + const result = UrlBuilder('/test', { a: 'b', c: 'd' }); + expect(result).toBe('/api/v1/test?a=b&c=d'); + }); + + it('no query parameters', () => { + const result = UrlBuilder('/test'); + expect(result).toBe('/api/v1/test'); + }); + + it('list-like query parameters', () => { + const result = UrlBuilder('/test', { a: ['b', 'c'] }); + expect(result).toBe('/api/v1/test?a=b&a=c'); + }); +}); diff --git a/frontend/src/api/base/index.ts b/frontend/src/api/base/index.ts new file mode 100644 index 0000000..12f6df5 --- /dev/null +++ b/frontend/src/api/base/index.ts @@ -0,0 +1,2 @@ +export { BaseAPI } from './base-api'; +export { UrlBuilder } from './urls'; diff --git a/frontend/src/api/base/urls.ts b/frontend/src/api/base/urls.ts new file mode 100644 index 0000000..5acf4ed --- /dev/null +++ b/frontend/src/api/base/urls.ts @@ -0,0 +1,31 @@ +export const prefix = '/api/v1'; + +export type QueryValue = + | string + | string[] + | number + | number[] + | boolean + | null + | undefined; + +export function UrlBuilder( + rest: string, + params: Record = {} +): string { + // we use a stub base URL to leverage the URL class + const url = new URL(prefix + rest, 'http://localhost.com'); + + for (const [key, value] of Object.entries(params)) { + if (Array.isArray(value)) { + for (const item of value) { + url.searchParams.append(key, String(item)); + } + } else { + url.searchParams.append(key, String(value)); + } + } + + // we return the path only, without the base URL + return url.toString().replace('http://localhost.com', ''); +} diff --git a/frontend/src/api/public.ts b/frontend/src/api/public.ts new file mode 100644 index 
0000000..7cd6ff5 --- /dev/null +++ b/frontend/src/api/public.ts @@ -0,0 +1,39 @@ +import { BaseAPI, UrlBuilder } from './base'; + +export type LoginResult = { + token: string; + expiresAt: string; +}; + +export type LoginPayload = { + username: string; + password: string; +}; + +export type RegisterPayload = { + user: { + email: string; + password: string; + name: string; + }; + groupName: string; +}; + +export class PublicApi extends BaseAPI { + public login(username: string, password: string) { + return this.http.post( + UrlBuilder('/users/login'), + { + username, + password, + } + ); + } + + public register(payload: RegisterPayload) { + return this.http.post( + UrlBuilder('/users/register'), + payload + ); + } +} diff --git a/frontend/src/api/user.ts b/frontend/src/api/user.ts new file mode 100644 index 0000000..a468737 --- /dev/null +++ b/frontend/src/api/user.ts @@ -0,0 +1,18 @@ +import { BaseAPI, UrlBuilder } from './base'; + +export type Result = { + item: T; +}; + +export type User = { + name: string; + email: string; + isSuperuser: boolean; + id: number; +}; + +export class UserApi extends BaseAPI { + public self() { + return this.http.get>(UrlBuilder('/users/self')); + } +} diff --git a/frontend/src/lib/requests/index.ts b/frontend/src/lib/requests/index.ts new file mode 100644 index 0000000..7bd0a14 --- /dev/null +++ b/frontend/src/lib/requests/index.ts @@ -0,0 +1 @@ +export { Requests, type TResponse } from './requests'; diff --git a/frontend/src/lib/requests/requests.ts b/frontend/src/lib/requests/requests.ts new file mode 100644 index 0000000..c0c83be --- /dev/null +++ b/frontend/src/lib/requests/requests.ts @@ -0,0 +1,95 @@ +export enum Method { + GET = 'GET', + POST = 'POST', + PUT = 'PUT', + DELETE = 'DELETE', +} + +export interface TResponse { + status: number; + error: boolean; + data: T; + response: Response; +} + +export class Requests { + private baseUrl: string; + private token: () => string; + private headers: Record = {}; + private logger?: 
(response: Response) => void; + + private url(rest: string): string { + return this.baseUrl + rest; + } + + constructor( + baseUrl: string, + token: string | (() => string) = '', + headers: Record = {}, + logger?: (response: Response) => void + ) { + this.baseUrl = baseUrl; + this.token = typeof token === 'string' ? () => token : token; + this.headers = headers; + this.logger = logger; + } + + public get(url: string): Promise> { + return this.do(Method.GET, url); + } + + public post(url: string, payload: T): Promise> { + return this.do(Method.POST, url, payload); + } + + public put(url: string, payload: T): Promise> { + return this.do(Method.PUT, url, payload); + } + + public delete(url: string): Promise> { + return this.do(Method.DELETE, url); + } + + private methodSupportsBody(method: Method): boolean { + return method === Method.POST || method === Method.PUT; + } + + private async do( + method: Method, + url: string, + payload: Object = {} + ): Promise> { + const args: RequestInit = { + method, + headers: { + 'Content-Type': 'application/json', + ...this.headers, + }, + }; + + const token = this.token(); + if (token !== '' && args.headers !== undefined) { + // @ts-expect-error -- headers is always defined at this point + args.headers['Authorization'] = token; + } + + if (this.methodSupportsBody(method)) { + args.body = JSON.stringify(payload); + } + + const response = await fetch(this.url(url), args); + + if (this.logger) { + this.logger(response); + } + + const data = await response.json(); + + return { + status: response.status, + error: !response.ok, + data, + response, + }; + } +} From 5471cb16ff175401da46aa2e836bc0dfb907e14f Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 16:07:05 -0800 Subject: [PATCH 009/530] logger --- backend/app/api/v1/v1_ctrl_auth.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/backend/app/api/v1/v1_ctrl_auth.go b/backend/app/api/v1/v1_ctrl_auth.go 
index c09e3f4..20f09f5 100644 --- a/backend/app/api/v1/v1_ctrl_auth.go +++ b/backend/app/api/v1/v1_ctrl_auth.go @@ -31,9 +31,9 @@ func (ctrl *V1Controller) HandleAuthLogin() http.HandlerFunc { if r.Header.Get("Content-Type") == HeaderFormData { err := r.ParseForm() - if err != nil { server.Respond(w, http.StatusBadRequest, server.Wrap(err)) + ctrl.log.Error(errors.New("failed to decode login form (FORM)"), logger.Props{"error": err.Error()}) return } @@ -43,6 +43,9 @@ func (ctrl *V1Controller) HandleAuthLogin() http.HandlerFunc { err := server.Decode(r, loginForm) if err != nil { + ctrl.log.Error(errors.New("failed to decode login form (JSON)"), logger.Props{ + "error": err.Error(), + }) server.Respond(w, http.StatusBadRequest, server.Wrap(err)) return } @@ -99,7 +102,7 @@ func (ctrl *V1Controller) HandleAuthLogout() http.HandlerFunc { return } - err = server.Respond(w, http.StatusNoContent, nil) + server.Respond(w, http.StatusNoContent, nil) } } From 7361dcc5f703179ea9203e65b8ba763b0808f2b6 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Tue, 30 Aug 2022 16:07:21 -0800 Subject: [PATCH 010/530] setup basic auth --- frontend/components.d.ts | 3 + frontend/package.json | 4 +- frontend/pnpm-lock.yaml | 10 +++ frontend/src/App.vue | 5 ++ frontend/src/components/App/Toast.vue | 71 +++++++++++++++++ frontend/src/composables/use-api.ts | 23 ++++++ frontend/src/composables/use-ids.ts | 31 ++++++++ frontend/src/composables/use-notifier.ts | 57 ++++++++++++++ frontend/src/layouts/default.vue | 13 ++-- frontend/src/pages/home.vue | 59 ++++++++++++++ frontend/src/pages/index.vue | 97 ++++++++++++++++++++---- frontend/src/store/auth.ts | 21 +++++ frontend/typed-router.d.ts | 3 +- frontend/vite.config.ts | 13 +++- 14 files changed, 382 insertions(+), 28 deletions(-) create mode 100644 frontend/src/components/App/Toast.vue create mode 100644 frontend/src/composables/use-api.ts create mode 100755 frontend/src/composables/use-ids.ts create 
mode 100644 frontend/src/composables/use-notifier.ts create mode 100644 frontend/src/pages/home.vue create mode 100644 frontend/src/store/auth.ts diff --git a/frontend/components.d.ts b/frontend/components.d.ts index 37aaa6c..50c0ff9 100644 --- a/frontend/components.d.ts +++ b/frontend/components.d.ts @@ -8,11 +8,14 @@ export {} declare module '@vue/runtime-core' { export interface GlobalComponents { AppHeader: typeof import('./src/components/AppHeader.vue')['default'] + Icon: typeof import('./src/components/Icon.vue')['default'] 'Icon:bx:bxMoon': typeof import('~icons/bx/bx-moon')['default'] 'Icon:bx:bxsMoon': typeof import('~icons/bx/bxs-moon')['default'] 'IconAkarIcons:githubFill': typeof import('~icons/akar-icons/github-fill')['default'] + Notifier: typeof import('./src/components/App/Notifier.vue')['default'] RouterLink: typeof import('vue-router')['RouterLink'] RouterView: typeof import('vue-router')['RouterView'] TextField: typeof import('./src/components/Form/TextField.vue')['default'] + Toast: typeof import('./src/components/App/Toast.vue')['default'] } } diff --git a/frontend/package.json b/frontend/package.json index 45dd54b..6c8018b 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -23,6 +23,7 @@ "dev": "vite", "build": "vite-ssg build", "serve": "vite preview", + "test:watch": "vitest --watch", "https-preview": "serve dist" }, "dependencies": { @@ -46,6 +47,7 @@ }, "devDependencies": { "@iconify/json": "^2.1.78", + "@iconify/vue": "^3.2.1", "@intlify/vite-plugin-vue-i18n": "^5.0.0", "@vitejs/plugin-vue": "^3.0.0", "@vue/compiler-sfc": "^3.2.37", @@ -66,4 +68,4 @@ "vitest": "^0.18.0", "vue-tsc": "^0.38.5" } -} +} \ No newline at end of file diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 956b60f..b500c33 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -2,6 +2,7 @@ lockfileVersion: 5.4 specifiers: '@iconify/json': ^2.1.78 + '@iconify/vue': ^3.2.1 '@intlify/vite-plugin-vue-i18n': ^5.0.0 
'@tailwindcss/aspect-ratio': ^0.4.0 '@tailwindcss/forms': ^0.5.2 @@ -60,6 +61,7 @@ dependencies: devDependencies: '@iconify/json': 2.1.78 + '@iconify/vue': 3.2.1_vue@3.2.37 '@intlify/vite-plugin-vue-i18n': 5.0.0_vite@3.0.0+vue-i18n@9.1.10 '@vitejs/plugin-vue': 3.0.0_vite@3.0.0+vue@3.2.37 '@vue/compiler-sfc': 3.2.37 @@ -1269,6 +1271,14 @@ packages: - supports-color dev: true + /@iconify/vue/3.2.1_vue@3.2.37: + resolution: {integrity: sha512-c4R6ZgFo1JrJ8aPMMgOPgfU7lBswihMGR+yWe/P4ZukC3kTkeT4+lkt9Pc/itVFMkwva/S/7u9YofmYv57fnNQ==} + peerDependencies: + vue: 3.x + dependencies: + vue: 3.2.37 + dev: true + /@intlify/bundle-utils/3.1.0_vue-i18n@9.1.10: resolution: {integrity: sha512-ghlJ0kR2cCQ8D+poKknC0Xx0ncOt3J3os7CcIAqqIWVF7k6AtGoCDnIru+YzlZcvFRNmP9wEZ7jKliojCdAWNg==} engines: {node: '>= 12'} diff --git a/frontend/src/App.vue b/frontend/src/App.vue index 4437b0a..7dc8709 100644 --- a/frontend/src/App.vue +++ b/frontend/src/App.vue @@ -1,3 +1,8 @@ + + diff --git a/frontend/src/components/App/Toast.vue b/frontend/src/components/App/Toast.vue new file mode 100644 index 0000000..5a92ff6 --- /dev/null +++ b/frontend/src/components/App/Toast.vue @@ -0,0 +1,71 @@ + + + + + diff --git a/frontend/src/composables/use-api.ts b/frontend/src/composables/use-api.ts new file mode 100644 index 0000000..806cb88 --- /dev/null +++ b/frontend/src/composables/use-api.ts @@ -0,0 +1,23 @@ +import { PublicApi } from '@/api/public'; +import { UserApi } from '@/api/user'; +import { Requests } from '@/lib/requests'; +import { useAuthStore } from '@/store/auth'; + +async function ApiDebugger(r: Response) { + console.table({ + 'Request Url': r.url, + 'Response Status': r.status, + 'Response Status Text': r.statusText, + }); +} + +export function usePublicApi(): PublicApi { + const requests = new Requests('', '', {}, ApiDebugger); + return new PublicApi(requests); +} + +export function useUserApi(): UserApi { + const authStore = useAuthStore(); + const requests = new Requests('', () => 
authStore.token, {}, ApiDebugger); + return new UserApi(requests); +} diff --git a/frontend/src/composables/use-ids.ts b/frontend/src/composables/use-ids.ts new file mode 100755 index 0000000..dd1a164 --- /dev/null +++ b/frontend/src/composables/use-ids.ts @@ -0,0 +1,31 @@ +function slugify(text: string) { + return text + .toString() + .toLowerCase() + .replace(/\s+/g, '-') // Replace spaces with - + .replace(/[^\w\-]+/g, '') // Remove all non-word chars + .replace(/\-\-+/g, '-') // Replace multiple - with single - + .replace(/^-+/, '') // Trim - from start of text + .replace(/-+$/, ''); // Trim - from end of text +} + +function idGenerator(): string { + const id = + Math.random().toString(32).substring(2, 6) + + Math.random().toString(36).substring(2, 6); + return slugify(id); +} + +/** + * useFormIds uses the provided label to generate a unique id for the + * form element. If no label is provided the id is generated using a + * random string. + */ +export function useFormIds(label: string): string { + const slug = label ? 
slugify(label) : idGenerator(); + return `${slug}-${idGenerator()}`; +} + +export function useId(): string { + return idGenerator(); +} diff --git a/frontend/src/composables/use-notifier.ts b/frontend/src/composables/use-notifier.ts new file mode 100644 index 0000000..4158cda --- /dev/null +++ b/frontend/src/composables/use-notifier.ts @@ -0,0 +1,57 @@ +import { useId } from './use-ids'; + +interface Notification { + id: string; + message: string; + type: 'success' | 'error' | 'info'; +} + +const notifications = ref([]); + +function addNotification(notification: Notification) { + notifications.value.unshift(notification); + + if (notifications.value.length > 4) { + notifications.value.pop(); + } else { + setTimeout(() => { + // Remove notification with ID + notifications.value = notifications.value.filter( + n => n.id !== notification.id + ); + }, 5000); + } +} + +export function useNotifications() { + return { + notifications, + dropNotification: (idx: number) => notifications.value.splice(idx, 1), + }; +} + +export function useNotifier() { + return { + success: (message: string) => { + addNotification({ + id: useId(), + message, + type: 'success', + }); + }, + error: (message: string) => { + addNotification({ + id: useId(), + message, + type: 'error', + }); + }, + info: (message: string) => { + addNotification({ + id: useId(), + message, + type: 'info', + }); + }, + }; +} diff --git a/frontend/src/layouts/default.vue b/frontend/src/layouts/default.vue index f71177c..735bf21 100644 --- a/frontend/src/layouts/default.vue +++ b/frontend/src/layouts/default.vue @@ -1,17 +1,14 @@ - + - - - - diff --git a/frontend/src/components/AppHeader.vue b/frontend/src/components/AppHeader.vue deleted file mode 100644 index a561faf..0000000 --- a/frontend/src/components/AppHeader.vue +++ /dev/null @@ -1,80 +0,0 @@ - - - - - diff --git a/frontend/src/components/Form/TextField.vue b/frontend/src/components/Form/TextField.vue deleted file mode 100644 index 2fbd52c..0000000 --- 
a/frontend/src/components/Form/TextField.vue +++ /dev/null @@ -1,31 +0,0 @@ - - - diff --git a/frontend/src/composables/use-api.ts b/frontend/src/composables/use-api.ts deleted file mode 100644 index 806cb88..0000000 --- a/frontend/src/composables/use-api.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { PublicApi } from '@/api/public'; -import { UserApi } from '@/api/user'; -import { Requests } from '@/lib/requests'; -import { useAuthStore } from '@/store/auth'; - -async function ApiDebugger(r: Response) { - console.table({ - 'Request Url': r.url, - 'Response Status': r.status, - 'Response Status Text': r.statusText, - }); -} - -export function usePublicApi(): PublicApi { - const requests = new Requests('', '', {}, ApiDebugger); - return new PublicApi(requests); -} - -export function useUserApi(): UserApi { - const authStore = useAuthStore(); - const requests = new Requests('', () => authStore.token, {}, ApiDebugger); - return new UserApi(requests); -} diff --git a/frontend/src/env.d.ts b/frontend/src/env.d.ts deleted file mode 100644 index d27eb5a..0000000 --- a/frontend/src/env.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/// - -declare module '*.vue' { - import { DefineComponent } from 'vue' - // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/ban-types - const component: DefineComponent<{}, {}, any> - export default component -} diff --git a/frontend/src/layouts/404.vue b/frontend/src/layouts/404.vue deleted file mode 100644 index c96df8a..0000000 --- a/frontend/src/layouts/404.vue +++ /dev/null @@ -1,5 +0,0 @@ - diff --git a/frontend/src/layouts/default.vue b/frontend/src/layouts/default.vue deleted file mode 100644 index 735bf21..0000000 --- a/frontend/src/layouts/default.vue +++ /dev/null @@ -1,15 +0,0 @@ - - diff --git a/frontend/src/main.ts b/frontend/src/main.ts deleted file mode 100644 index 5171ac4..0000000 --- a/frontend/src/main.ts +++ /dev/null @@ -1,19 +0,0 @@ -import App from '@/App.vue'; -import { ViteSSG } from 'vite-ssg'; - 
-import '@/styles/index.css'; -import { ViteSetupModule } from './types/ViteSetupModule'; -import { extendedRoutes } from '@/router'; - -export const createApp = ViteSSG( - App, - { routes: extendedRoutes }, - async ctx => { - Object.values( - import.meta.glob<{ install: ViteSetupModule }>('./modules/*.ts', { - eager: true, - }) - ).map(i => i.install?.(ctx)); - }, - {} -); diff --git a/frontend/src/modules/i18n.ts b/frontend/src/modules/i18n.ts deleted file mode 100644 index ef4f61a..0000000 --- a/frontend/src/modules/i18n.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { ViteSetupModule } from '@/types/ViteSetupModule'; -import { createI18n } from 'vue-i18n'; - -// Import i18n resources -// https://vitejs.dev/guide/features.html#glob-import - -// Don't need this? Try vitesse-lite: https://github.com/antfu/vitesse-lite -const messages = Object.fromEntries( - Object.entries( - import.meta.glob<{ default: any }>('../../locales/*.{y(a)?ml,json}', { - eager: true, - }) - ).map(([key, value]) => { - const isYamlOrJson = key.endsWith('.yaml') || key.endsWith('.json'); - - return [key.slice(14, isYamlOrJson ? -5 : -4), value.default]; - }) -); - -export const install: ViteSetupModule = ({ app }) => { - const i18n = createI18n({ - legacy: false, - locale: 'en', - messages, - globalInjection: true, - }); - - app.use(i18n); -}; diff --git a/frontend/src/modules/pinia.ts b/frontend/src/modules/pinia.ts deleted file mode 100644 index db34dde..0000000 --- a/frontend/src/modules/pinia.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { ViteSetupModule } from '@/types/ViteSetupModule'; -import { createPinia } from 'pinia'; - -// Setup Pinia -// https://pinia.esm.dev/ -export const install: ViteSetupModule = ({ isClient, initialState, app }) => { - const pinia = createPinia(); - app.use(pinia); - // Refer to - // https://github.com/antfu/vite-ssg/blob/main/README.md#state-serialization - // for other serialization strategies. 
- if (isClient) pinia.state.value = initialState.pinia || {}; - else initialState.pinia = pinia.state.value; -}; diff --git a/frontend/src/modules/pwa.ts b/frontend/src/modules/pwa.ts deleted file mode 100644 index 6313341..0000000 --- a/frontend/src/modules/pwa.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { ViteSetupModule } from '@/types/ViteSetupModule'; - -export const install: ViteSetupModule = ({ isClient, router }) => { - if (!isClient) return; - - router.isReady().then(async () => { - const { registerSW } = await import('virtual:pwa-register'); - registerSW({ immediate: true }); - }); -}; diff --git a/frontend/src/pages/[...all].vue b/frontend/src/pages/[...all].vue deleted file mode 100644 index 2d70677..0000000 --- a/frontend/src/pages/[...all].vue +++ /dev/null @@ -1,19 +0,0 @@ - - - - - -name : not-found -meta: - layout: 404 - diff --git a/frontend/src/pages/home.vue b/frontend/src/pages/home.vue deleted file mode 100644 index 3d89e8a..0000000 --- a/frontend/src/pages/home.vue +++ /dev/null @@ -1,137 +0,0 @@ - - - - - -name: home - diff --git a/frontend/src/pages/index.vue b/frontend/src/pages/index.vue deleted file mode 100644 index c22447d..0000000 --- a/frontend/src/pages/index.vue +++ /dev/null @@ -1,188 +0,0 @@ - - - - - -name: login - - - diff --git a/frontend/src/router.ts b/frontend/src/router.ts deleted file mode 100644 index 657ff61..0000000 --- a/frontend/src/router.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { - createRouter, - createWebHistory, - createMemoryHistory, -} from '@vue-router'; - -import { setupLayouts } from 'virtual:generated-layouts'; -export let extendedRoutes: any = null; -export const router = createRouter({ - history: import.meta.env.SSR ? 
createMemoryHistory() : createWebHistory(), - // You don't need to pass the routes anymore, - // the plugin writes it for you 🤖 - extendRoutes: routes => { - extendedRoutes = routes; - return setupLayouts(routes); - }, -}); diff --git a/frontend/src/store/auth.ts b/frontend/src/store/auth.ts deleted file mode 100644 index 092d1fb..0000000 --- a/frontend/src/store/auth.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { UserApi } from '@/api/user'; -import { defineStore } from 'pinia'; - -export const useAuthStore = defineStore('auth', { - state: () => ({ - token: useLocalStorage('pinia/auth/token', ''), - expires: useLocalStorage('pinia/auth/expires', ''), - }), - getters: { - isTokenExpired: state => { - if (!state.expires) { - return true; - } - - if (typeof state.expires === 'string') { - return new Date(state.expires) < new Date(); - } - - return state.expires < new Date(); - }, - }, - actions: { - async logout(api: UserApi) { - const result = await api.logout(); - - if (result.error) { - return result; - } - - this.token = ''; - this.expires = ''; - - return result; - }, - }, -}); diff --git a/frontend/src/store/index.ts b/frontend/src/store/index.ts deleted file mode 100644 index 43ba769..0000000 --- a/frontend/src/store/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { defineStore } from 'pinia'; - -export const useStore = defineStore('store', { - state: () => ({ - count: 0, - }), -}); diff --git a/frontend/src/styles/index.css b/frontend/src/styles/index.css deleted file mode 100644 index b5c61c9..0000000 --- a/frontend/src/styles/index.css +++ /dev/null @@ -1,3 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; diff --git a/frontend/src/types/ViteSetupModule.ts b/frontend/src/types/ViteSetupModule.ts deleted file mode 100644 index db6062a..0000000 --- a/frontend/src/types/ViteSetupModule.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { ViteSSGContext } from 'vite-ssg'; - -export type ViteSetupModule = (ctx: ViteSSGContext) => void; diff --git 
a/frontend/stores/auth.ts b/frontend/stores/auth.ts new file mode 100644 index 0000000..f2f1b43 --- /dev/null +++ b/frontend/stores/auth.ts @@ -0,0 +1,37 @@ +import { UserApi } from "~~/lib/api/user"; +import { defineStore } from "pinia"; +import { useLocalStorage } from "@vueuse/core"; + +export const useAuthStore = defineStore("auth", { + state: () => ({ + token: useLocalStorage("pinia/auth/token", ""), + expires: useLocalStorage("pinia/auth/expires", ""), + }), + getters: { + isTokenExpired: (state) => { + if (!state.expires) { + return true; + } + + if (typeof state.expires === "string") { + return new Date(state.expires) < new Date(); + } + + return state.expires < new Date(); + }, + }, + actions: { + async logout(api: UserApi) { + const result = await api.logout(); + + if (result.error) { + return result; + } + + this.token = ""; + this.expires = ""; + + return result; + }, + }, +}); diff --git a/frontend/tailwind.config.js b/frontend/tailwind.config.js index 59d9b6e..220a905 100644 --- a/frontend/tailwind.config.js +++ b/frontend/tailwind.config.js @@ -1,16 +1,16 @@ module.exports = { - content: ['./index.html', './src/**/*.{vue,js,ts,jsx,tsx}'], - darkMode: 'class', // or 'media' or 'class' - theme: { - extend: {}, - }, - variants: { - extend: {}, - }, - plugins: [ - require('@tailwindcss/forms'), - require('@tailwindcss/aspect-ratio'), - require('@tailwindcss/typography'), - require('daisyui'), - ], + content: ['./app.vue', './{components,pages,layouts}/**/*.{vue,js,ts,jsx,tsx}'], + darkMode: 'class', // or 'media' or 'class' + theme: { + extend: {}, + }, + variants: { + extend: {}, + }, + plugins: [ + require('@tailwindcss/forms'), + require('@tailwindcss/aspect-ratio'), + require('@tailwindcss/typography'), + require('daisyui'), + ], }; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index a4c87e8..a7bfa18 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -1,34 +1,4 @@ { - "compilerOptions": { - "target": "esnext", - 
"useDefineForClassFields": true, - "module": "esnext", - "moduleResolution": "node", - "strict": true, - "jsx": "preserve", - "sourceMap": true, - "resolveJsonModule": true, - "esModuleInterop": true, - "lib": ["esnext", "dom"], - "baseUrl": ".", - "paths": { - "@/*": ["src/*"] - }, - "types": [ - "vite/client", - "vite-plugin-vue-layouts/client", - "unplugin-icons/types/vue", - "vite-plugin-pwa/client", - "@intlify/vite-plugin-vue-i18n/client" - ] - }, - "include": [ - "src/**/*.ts", - "src/**/*.d.ts", - "src/**/*.tsx", - "src/**/*.vue", - "components.d.ts", - "auto-imports.d.ts", - "typed-router.d.ts" - ] + // https://v3.nuxtjs.org/concepts/typescript + "extends": "./.nuxt/tsconfig.json" } diff --git a/frontend/typed-router.d.ts b/frontend/typed-router.d.ts deleted file mode 100644 index 50c5d5f..0000000 --- a/frontend/typed-router.d.ts +++ /dev/null @@ -1,96 +0,0 @@ - -// Generated by unplugin-vue-router. ‼️ DO NOT MODIFY THIS FILE ‼️ -// It's recommended to commit this file. -// Make sure to add this file to your tsconfig.json file as an "includes" or "files" entry. 
- -/// - -import type { - // type safe route locations - RouteLocationTypedList, - RouteLocationResolvedTypedList, - RouteLocationNormalizedTypedList, - RouteLocationNormalizedLoadedTypedList, - - // helper types - // route definitions - RouteRecordInfo, - ParamValue, - ParamValueOneOrMore, - ParamValueZeroOrMore, - ParamValueZeroOrOne, - - // vue-router extensions - _RouterTyped, - RouterLinkTyped, - NavigationGuard, - UseLinkFnTyped, -} from 'unplugin-vue-router' - -declare module '@vue-router/routes' { - export interface RouteNamedMap { - 'login': RouteRecordInfo<'login', '/', Record, Record>, - 'not-found': RouteRecordInfo<'not-found', '/:all(.*)', { all: ParamValue }, { all: ParamValue }>, - 'home': RouteRecordInfo<'home', '/home', Record, Record>, - } -} - -declare module '@vue-router' { - import type { RouteNamedMap } from '@vue-router/routes' - - export type RouterTyped = _RouterTyped - - /** - * Type safe version of `RouteLocationNormalized` (the type of `to` and `from` in navigation guards). - * Allows passing the name of the route to be passed as a generic. - */ - export type RouteLocationNormalized = RouteLocationNormalizedTypedList[Name] - - /** - * Type safe version of `RouteLocationNormalizedLoaded` (the return type of `useRoute()`). - * Allows passing the name of the route to be passed as a generic. - */ - export type RouteLocationNormalizedLoaded = RouteLocationNormalizedLoadedTypedList[Name] - - /** - * Type safe version of `RouteLocationResolved` (the returned route of `router.resolve()`). - * Allows passing the name of the route to be passed as a generic. - */ - export type RouteLocationResolved = RouteLocationResolvedTypedList[Name] - - /** - * Type safe version of `RouteLocation` . Allows passing the name of the route to be passed as a generic. - */ - export type RouteLocation = RouteLocationTypedList[Name] - - /** - * Generate a type safe params for a route location. Requires the name of the route to be passed as a generic. 
- */ - export type RouteParams = RouteNamedMap[Name]['params'] - /** - * Generate a type safe raw params for a route location. Requires the name of the route to be passed as a generic. - */ - export type RouteParamsRaw = RouteNamedMap[Name]['paramsRaw'] - - export function useRouter(): RouterTyped - export function useRoute(name?: Name): RouteLocationNormalizedLoadedTypedList[Name] - - export const useLink: UseLinkFnTyped - - export function onBeforeRouteLeave(guard: NavigationGuard): void - export function onBeforeRouteUpdate(guard: NavigationGuard): void -} - -declare module 'vue-router' { - import type { RouteNamedMap } from '@vue-router/routes' - - export interface TypesConfig { - beforeRouteUpdate: NavigationGuard - beforeRouteLeave: NavigationGuard - - $route: RouteLocationNormalizedLoadedTypedList[keyof RouteNamedMap] - $router: _RouterTyped - - RouterLink: RouterLinkTyped - } -} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts deleted file mode 100644 index ee6d191..0000000 --- a/frontend/vite.config.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { defineConfig } from 'vite'; -import vue from '@vitejs/plugin-vue'; -import { resolve } from 'path'; -import Components from 'unplugin-vue-components/vite'; -import AutoImport from 'unplugin-auto-import/vite'; -import Icons from 'unplugin-icons/vite'; -import IconsResolver from 'unplugin-icons/resolver'; -import Layouts from 'vite-plugin-vue-layouts'; -import { VitePWA } from 'vite-plugin-pwa'; -import VueI18n from '@intlify/vite-plugin-vue-i18n'; -import generateSitemap from 'vite-ssg-sitemap'; -import VueRouter from 'unplugin-vue-router/vite'; -import { VueRouterExports } from 'unplugin-vue-router'; -// https://vitejs.dev/config/ -export default defineConfig({ - plugins: [ - vue(), - VueRouter({ - dts: true, - routesFolder: 'src/pages', - }), - Components({ - dts: true, - dirs: ['src/components'], - directoryAsNamespace: true, - resolvers: [ - IconsResolver({ - prefix: 'icon', - }), - ], - }), - 
Icons({ - compiler: 'vue3', - }), - AutoImport({ - dts: true, - // targets to transform - include: [ - /\.[tj]sx?$/, // .ts, .tsx, .js, .jsx - /\.vue\??/, // .vue - ], - dirs: ['./composables'], - - // global imports to register - imports: [ - // presets - 'vue', - { '@vue-router': VueRouterExports }, - 'vue-i18n', - '@vueuse/core', - '@vueuse/head', - // custom - ], - - // custom resolvers - // see https://github.com/antfu/unplugin-auto-import/pull/23/ - resolvers: [], - }), - Layouts(), - VitePWA({ - includeAssets: ['favicon-16x16.png', 'favicon-32x32.png', 'favicon.ico', 'robots.txt', 'apple-touch-icon.png'], - manifest: { - name: 'Vitailse', - short_name: 'Vitailse', - description: 'Opinionated vite template with TailwindCSS', - theme_color: '#076AE0', - icons: [ - { - src: 'pwa-192x192.png', - sizes: '192x192', - type: 'image/png', - }, - { - src: 'pwa-512x512.png', - sizes: '512x512', - type: 'image/png', - }, - { - src: 'pwa-512x512.png', - sizes: '512x512', - type: 'image/png', - purpose: 'any maskable', - }, - ], - }, - }), - VueI18n({ - runtimeOnly: true, - compositionOnly: true, - include: [resolve(__dirname, 'locales/**')], - }), - ], - resolve: { - alias: { - '@': resolve(__dirname, './src'), - }, - }, - server: { - fs: { - strict: true, - }, - proxy: { - '/api': { - target: 'http://localhost:7745', - }, - }, - }, - optimizeDeps: { - include: ['vue', 'vue-router', '@vueuse/core', '@vueuse/head'], - }, - // @ts-ignore - ssgOptions: { - script: 'async', - formatting: 'minify', - format: 'cjs', - onFinished() { - generateSitemap(); - }, - mock: true, - }, - // https://github.com/vitest-dev/vitest - test: { - include: ['src/__test__/**/*.test.ts', 'src/**/*.test.ts', 'src/__test__/**/*.spec.ts'], - environment: 'jsdom', - deps: { - inline: ['@vue', '@vueuse', 'vue-demi'], - }, - }, - ssr: { - // TODO: workaround until they support native ESM - noExternal: ['workbox-window', /vue-i18n/], - }, -}); From f956ec8eb2e4c801f74c542d343425c7df2df87b Mon Sep 17 
00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Thu, 1 Sep 2022 15:11:14 -0800 Subject: [PATCH 023/530] label stubs --- backend/app/api/docs/docs.go | 173 ++++++++++++++++++++++++ backend/app/api/docs/swagger.json | 173 ++++++++++++++++++++++++ backend/app/api/docs/swagger.yaml | 103 ++++++++++++++ backend/app/api/routes.go | 6 + backend/app/api/v1/partials.go | 36 +++++ backend/app/api/v1/v1_ctrl_labels.go | 69 ++++++++++ backend/app/api/v1/v1_ctrl_locations.go | 17 --- backend/internal/types/label_types.go | 7 + 8 files changed, 567 insertions(+), 17 deletions(-) create mode 100644 backend/app/api/v1/partials.go create mode 100644 backend/app/api/v1/v1_ctrl_labels.go create mode 100644 backend/internal/types/label_types.go diff --git a/backend/app/api/docs/docs.go b/backend/app/api/docs/docs.go index 211e15f..5ea7d64 100644 --- a/backend/app/api/docs/docs.go +++ b/backend/app/api/docs/docs.go @@ -261,6 +261,170 @@ const docTemplate = `{ } } }, + "/v1/labels": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "Get All Labels", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Results" + }, + { + "type": "object", + "properties": { + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/types.LabelOut" + } + } + } + } + ] + } + } + } + }, + "post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "Create a new label", + "parameters": [ + { + "description": "Label Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.LabelCreate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.LabelSummary" + } + } + } + } + }, + "/v1/labels/{id}": { + "get": { + "security": [ + { + "Bearer": [] + 
} + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "Gets a label and fields", + "parameters": [ + { + "type": "string", + "description": "Label ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.LabelOut" + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "updates a label", + "parameters": [ + { + "type": "string", + "description": "Label ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.LabelOut" + } + } + } + }, + "delete": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "deletes a label", + "parameters": [ + { + "type": "string", + "description": "Label ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + } + } + } + }, "/v1/locations": { "get": { "security": [ @@ -1108,6 +1272,15 @@ const docTemplate = `{ } } }, + "types.LabelCreate": { + "type": "object" + }, + "types.LabelOut": { + "type": "object" + }, + "types.LabelSummary": { + "type": "object" + }, "types.LocationCreate": { "type": "object", "properties": { diff --git a/backend/app/api/docs/swagger.json b/backend/app/api/docs/swagger.json index b688cb7..5f4ab5a 100644 --- a/backend/app/api/docs/swagger.json +++ b/backend/app/api/docs/swagger.json @@ -253,6 +253,170 @@ } } }, + "/v1/labels": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "Get All Labels", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Results" + }, + { + "type": "object", + 
"properties": { + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/types.LabelOut" + } + } + } + } + ] + } + } + } + }, + "post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "Create a new label", + "parameters": [ + { + "description": "Label Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.LabelCreate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.LabelSummary" + } + } + } + } + }, + "/v1/labels/{id}": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "Gets a label and fields", + "parameters": [ + { + "type": "string", + "description": "Label ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.LabelOut" + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "updates a label", + "parameters": [ + { + "type": "string", + "description": "Label ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.LabelOut" + } + } + } + }, + "delete": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Labels" + ], + "summary": "deletes a label", + "parameters": [ + { + "type": "string", + "description": "Label ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + } + } + } + }, "/v1/locations": { "get": { "security": [ @@ -1100,6 +1264,15 @@ } } }, + "types.LabelCreate": { + "type": "object" + }, + "types.LabelOut": { + "type": "object" + }, + 
"types.LabelSummary": { + "type": "object" + }, "types.LocationCreate": { "type": "object", "properties": { diff --git a/backend/app/api/docs/swagger.yaml b/backend/app/api/docs/swagger.yaml index 62cf296..c13ac90 100644 --- a/backend/app/api/docs/swagger.yaml +++ b/backend/app/api/docs/swagger.yaml @@ -353,6 +353,12 @@ definitions: updatedAt: type: string type: object + types.LabelCreate: + type: object + types.LabelOut: + type: object + types.LabelSummary: + type: object types.LocationCreate: properties: description: @@ -583,6 +589,103 @@ paths: summary: Update a User tags: - 'Admin: Users' + /v1/labels: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Results' + - properties: + items: + items: + $ref: '#/definitions/types.LabelOut' + type: array + type: object + security: + - Bearer: [] + summary: Get All Labels + tags: + - Labels + post: + parameters: + - description: Label Data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/types.LabelCreate' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/types.LabelSummary' + security: + - Bearer: [] + summary: Create a new label + tags: + - Labels + /v1/labels/{id}: + delete: + parameters: + - description: Label ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "204": + description: "" + security: + - Bearer: [] + summary: deletes a label + tags: + - Labels + get: + parameters: + - description: Label ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/types.LabelOut' + security: + - Bearer: [] + summary: Gets a label and fields + tags: + - Labels + put: + parameters: + - description: Label ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + 
"200": + description: OK + schema: + $ref: '#/definitions/types.LabelOut' + security: + - Bearer: [] + summary: updates a label + tags: + - Labels /v1/locations: get: produces: diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go index 81efe4d..c50b39a 100644 --- a/backend/app/api/routes.go +++ b/backend/app/api/routes.go @@ -55,6 +55,12 @@ func (a *app) newRouter(repos *repo.AllRepos) *chi.Mux { r.Get(v1Base("/locations/{id}"), v1Handlers.HandleLocationGet()) r.Put(v1Base("/locations/{id}"), v1Handlers.HandleLocationUpdate()) r.Delete(v1Base("/locations/{id}"), v1Handlers.HandleLocationDelete()) + + r.Get(v1Base("/labels"), v1Handlers.HandleLabelsGetAll()) + r.Post(v1Base("/labels"), v1Handlers.HandleLabelsCreate()) + r.Get(v1Base("/labels/{id}"), v1Handlers.HandleLabelGet()) + r.Put(v1Base("/labels/{id}"), v1Handlers.HandleLabelUpdate()) + r.Delete(v1Base("/labels/{id}"), v1Handlers.HandleLabelDelete()) }) r.Group(func(r chi.Router) { diff --git a/backend/app/api/v1/partials.go b/backend/app/api/v1/partials.go new file mode 100644 index 0000000..018c043 --- /dev/null +++ b/backend/app/api/v1/partials.go @@ -0,0 +1,36 @@ +package v1 + +import ( + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/google/uuid" + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/logger" + "github.com/hay-kot/content/backend/pkgs/server" +) + +/* +This is where we put partial snippets/functions for actions that are commonly +used within the controller class. This _hopefully_ helps with code duplication +and makes it a little more consistent when error handling and logging. +*/ + +// partialParseIdAndUser parses the ID from the requests URL and pulls the user +// from the context. If either of these fail, it will return an error. When an error +// occurs it will also write the error to the response. 
As such, if an error is returned +// from this function you can return immediately without writing to the response. +func (ctrl *V1Controller) partialParseIdAndUser(w http.ResponseWriter, r *http.Request) (uuid.UUID, *types.UserOut, error) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "details": "failed to convert id to valid UUID", + }) + server.RespondError(w, http.StatusBadRequest, err) + return uuid.Nil, nil, err + } + + user := services.UseUserCtx(r.Context()) + return uid, user, nil +} diff --git a/backend/app/api/v1/v1_ctrl_labels.go b/backend/app/api/v1/v1_ctrl_labels.go new file mode 100644 index 0000000..f31976b --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_labels.go @@ -0,0 +1,69 @@ +package v1 + +import ( + "net/http" +) + +// HandleLabelsGetAll godoc +// @Summary Get All Labels +// @Tags Labels +// @Produce json +// @Success 200 {object} server.Results{items=[]types.LabelOut} +// @Router /v1/labels [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleLabelsGetAll() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} + +// HandleLabelsCreate godoc +// @Summary Create a new label +// @Tags Labels +// @Produce json +// @Param payload body types.LabelCreate true "Label Data" +// @Success 200 {object} types.LabelSummary +// @Router /v1/labels [POST] +// @Security Bearer +func (ctrl *V1Controller) HandleLabelsCreate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} + +// HandleLabelDelete godocs +// @Summary deletes a label +// @Tags Labels +// @Produce json +// @Param id path string true "Label ID" +// @Success 204 +// @Router /v1/labels/{id} [DELETE] +// @Security Bearer +func (ctrl *V1Controller) HandleLabelDelete() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} + +// HandleLabelGet godocs +// @Summary Gets a label and fields +// @Tags Labels +// @Produce json +// @Param id path 
string true "Label ID" +// @Success 200 {object} types.LabelOut +// @Router /v1/labels/{id} [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleLabelGet() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} + +// HandleLabelUpdate godocs +// @Summary updates a label +// @Tags Labels +// @Produce json +// @Param id path string true "Label ID" +// @Success 200 {object} types.LabelOut +// @Router /v1/labels/{id} [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleLabelUpdate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} diff --git a/backend/app/api/v1/v1_ctrl_locations.go b/backend/app/api/v1/v1_ctrl_locations.go index f0b8ef1..14b3775 100644 --- a/backend/app/api/v1/v1_ctrl_locations.go +++ b/backend/app/api/v1/v1_ctrl_locations.go @@ -3,11 +3,8 @@ package v1 import ( "net/http" - "github.com/go-chi/chi/v5" - "github.com/google/uuid" "github.com/hay-kot/content/backend/internal/services" "github.com/hay-kot/content/backend/internal/types" - "github.com/hay-kot/content/backend/pkgs/logger" "github.com/hay-kot/content/backend/pkgs/server" ) @@ -61,20 +58,6 @@ func (ctrl *V1Controller) HandleLocationCreate() http.HandlerFunc { } } -func (ctrl *V1Controller) partialParseIdAndUser(w http.ResponseWriter, r *http.Request) (uuid.UUID, *types.UserOut, error) { - uid, err := uuid.Parse(chi.URLParam(r, "id")) - if err != nil { - ctrl.log.Debug(err.Error(), logger.Props{ - "details": "failed to convert id to valid UUID", - }) - server.RespondError(w, http.StatusBadRequest, err) - return uuid.Nil, nil, err - } - - user := services.UseUserCtx(r.Context()) - return uid, user, nil -} - // HandleLocationDelete godocs // @Summary deletes a location // @Tags Locations diff --git a/backend/internal/types/label_types.go b/backend/internal/types/label_types.go new file mode 100644 index 0000000..029a375 --- /dev/null +++ b/backend/internal/types/label_types.go @@ -0,0 +1,7 @@ +package types + +type 
LabelOut struct{} + +type LabelCreate struct{} + +type LabelSummary struct{} From 8ece3bd7bff008e0a0d42a17864efafe3ab41606 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Thu, 1 Sep 2022 17:52:40 -0800 Subject: [PATCH 024/530] labels create and get --- backend/app/api/docs/docs.go | 63 +++++++++- backend/app/api/docs/swagger.json | 63 +++++++++- backend/app/api/docs/swagger.yaml | 37 ++++++ backend/app/api/v1/v1_ctrl_labels.go | 72 +++++++++++ backend/internal/repo/repo_labels.go | 60 +++++++++ backend/internal/repo/repos_all.go | 2 + backend/internal/services/all.go | 2 + backend/internal/services/mappers/labels.go | 32 +++++ backend/internal/services/service_labels.go | 63 ++++++++++ backend/internal/types/label_types.go | 33 ++++- frontend/components/App/Header.vue | 81 ++++-------- frontend/components/App/Logo.vue | 123 +++++++++++++++++++ frontend/components/Label/Chip.vue | 27 ++++ frontend/components/Label/CreateModal.vue | 66 ++++++++++ frontend/components/Location/CreateModal.vue | 70 +++++++++++ frontend/lib/api/base/base-types.ts | 3 - frontend/lib/api/classes/labels.ts | 32 +++++ frontend/lib/api/classes/locations.ts | 17 +-- frontend/lib/api/classes/types/index.ts | 19 +++ frontend/lib/api/public.ts | 18 +-- frontend/lib/api/user.ts | 14 ++- frontend/pages/home.vue | 57 +++++---- frontend/pages/index.vue | 6 +- frontend/pages/location/[id].vue | 22 ++-- 24 files changed, 850 insertions(+), 132 deletions(-) create mode 100644 backend/internal/repo/repo_labels.go create mode 100644 backend/internal/services/mappers/labels.go create mode 100644 backend/internal/services/service_labels.go create mode 100644 frontend/components/App/Logo.vue create mode 100644 frontend/components/Label/Chip.vue create mode 100644 frontend/components/Label/CreateModal.vue create mode 100644 frontend/components/Location/CreateModal.vue delete mode 100644 frontend/lib/api/base/base-types.ts create mode 100644 
frontend/lib/api/classes/labels.ts create mode 100644 frontend/lib/api/classes/types/index.ts diff --git a/backend/app/api/docs/docs.go b/backend/app/api/docs/docs.go index 5ea7d64..c2a9f17 100644 --- a/backend/app/api/docs/docs.go +++ b/backend/app/api/docs/docs.go @@ -1273,13 +1273,70 @@ const docTemplate = `{ } }, "types.LabelCreate": { - "type": "object" + "type": "object", + "properties": { + "color": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + } + } }, "types.LabelOut": { - "type": "object" + "type": "object", + "properties": { + "createdAt": { + "type": "string" + }, + "description": { + "type": "string" + }, + "groupId": { + "type": "string" + }, + "id": { + "type": "string" + }, + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/types.ItemSummary" + } + }, + "name": { + "type": "string" + }, + "updatedAt": { + "type": "string" + } + } }, "types.LabelSummary": { - "type": "object" + "type": "object", + "properties": { + "createdAt": { + "type": "string" + }, + "description": { + "type": "string" + }, + "groupId": { + "type": "string" + }, + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "updatedAt": { + "type": "string" + } + } }, "types.LocationCreate": { "type": "object", diff --git a/backend/app/api/docs/swagger.json b/backend/app/api/docs/swagger.json index 5f4ab5a..dbfe062 100644 --- a/backend/app/api/docs/swagger.json +++ b/backend/app/api/docs/swagger.json @@ -1265,13 +1265,70 @@ } }, "types.LabelCreate": { - "type": "object" + "type": "object", + "properties": { + "color": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + } + } }, "types.LabelOut": { - "type": "object" + "type": "object", + "properties": { + "createdAt": { + "type": "string" + }, + "description": { + "type": "string" + }, + "groupId": { + "type": "string" + }, + "id": { + "type": "string" + }, + "items": { + "type": "array", + 
"items": { + "$ref": "#/definitions/types.ItemSummary" + } + }, + "name": { + "type": "string" + }, + "updatedAt": { + "type": "string" + } + } }, "types.LabelSummary": { - "type": "object" + "type": "object", + "properties": { + "createdAt": { + "type": "string" + }, + "description": { + "type": "string" + }, + "groupId": { + "type": "string" + }, + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "updatedAt": { + "type": "string" + } + } }, "types.LocationCreate": { "type": "object", diff --git a/backend/app/api/docs/swagger.yaml b/backend/app/api/docs/swagger.yaml index c13ac90..71f2970 100644 --- a/backend/app/api/docs/swagger.yaml +++ b/backend/app/api/docs/swagger.yaml @@ -354,10 +354,47 @@ definitions: type: string type: object types.LabelCreate: + properties: + color: + type: string + description: + type: string + name: + type: string type: object types.LabelOut: + properties: + createdAt: + type: string + description: + type: string + groupId: + type: string + id: + type: string + items: + items: + $ref: '#/definitions/types.ItemSummary' + type: array + name: + type: string + updatedAt: + type: string type: object types.LabelSummary: + properties: + createdAt: + type: string + description: + type: string + groupId: + type: string + id: + type: string + name: + type: string + updatedAt: + type: string type: object types.LocationCreate: properties: diff --git a/backend/app/api/v1/v1_ctrl_labels.go b/backend/app/api/v1/v1_ctrl_labels.go index f31976b..19dfebc 100644 --- a/backend/app/api/v1/v1_ctrl_labels.go +++ b/backend/app/api/v1/v1_ctrl_labels.go @@ -2,6 +2,10 @@ package v1 import ( "net/http" + + "github.com/hay-kot/content/backend/internal/services" + "github.com/hay-kot/content/backend/internal/types" + "github.com/hay-kot/content/backend/pkgs/server" ) // HandleLabelsGetAll godoc @@ -13,6 +17,14 @@ import ( // @Security Bearer func (ctrl *V1Controller) HandleLabelsGetAll() http.HandlerFunc { return func(w http.ResponseWriter, r 
*http.Request) { + user := services.UseUserCtx(r.Context()) + labels, err := ctrl.svc.Labels.GetAll(r.Context(), user.GroupID) + if err != nil { + ctrl.log.Error(err, nil) + server.RespondServerError(w) + return + } + server.Respond(w, http.StatusOK, server.Results{Items: labels}) } } @@ -26,6 +38,23 @@ func (ctrl *V1Controller) HandleLabelsGetAll() http.HandlerFunc { // @Security Bearer func (ctrl *V1Controller) HandleLabelsCreate() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + createData := types.LabelCreate{} + if err := server.Decode(r, &createData); err != nil { + ctrl.log.Error(err, nil) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + user := services.UseUserCtx(r.Context()) + label, err := ctrl.svc.Labels.Create(r.Context(), user.GroupID, createData) + if err != nil { + ctrl.log.Error(err, nil) + server.RespondServerError(w) + return + } + + server.Respond(w, http.StatusCreated, label) + } } @@ -39,6 +68,18 @@ func (ctrl *V1Controller) HandleLabelsCreate() http.HandlerFunc { // @Security Bearer func (ctrl *V1Controller) HandleLabelDelete() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + uid, user, err := ctrl.partialParseIdAndUser(w, r) + if err != nil { + return + } + + err = ctrl.svc.Labels.Delete(r.Context(), user.GroupID, uid) + if err != nil { + ctrl.log.Error(err, nil) + server.RespondServerError(w) + return + } + server.Respond(w, http.StatusNoContent, nil) } } @@ -52,6 +93,18 @@ func (ctrl *V1Controller) HandleLabelDelete() http.HandlerFunc { // @Security Bearer func (ctrl *V1Controller) HandleLabelGet() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + uid, user, err := ctrl.partialParseIdAndUser(w, r) + if err != nil { + return + } + + labels, err := ctrl.svc.Labels.Get(r.Context(), user.GroupID, uid) + if err != nil { + ctrl.log.Error(err, nil) + server.RespondServerError(w) + return + } + server.Respond(w, http.StatusOK, labels) } } @@ 
-65,5 +118,24 @@ func (ctrl *V1Controller) HandleLabelGet() http.HandlerFunc { // @Security Bearer func (ctrl *V1Controller) HandleLabelUpdate() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + body := types.LabelUpdate{} + if err := server.Decode(r, &body); err != nil { + ctrl.log.Error(err, nil) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + uid, user, err := ctrl.partialParseIdAndUser(w, r) + if err != nil { + return + } + + body.ID = uid + result, err := ctrl.svc.Labels.Update(r.Context(), user.GroupID, body) + if err != nil { + ctrl.log.Error(err, nil) + server.RespondServerError(w) + return + } + server.Respond(w, http.StatusOK, result) } } diff --git a/backend/internal/repo/repo_labels.go b/backend/internal/repo/repo_labels.go new file mode 100644 index 0000000..d54c852 --- /dev/null +++ b/backend/internal/repo/repo_labels.go @@ -0,0 +1,60 @@ +package repo + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/ent/group" + "github.com/hay-kot/content/backend/ent/label" + "github.com/hay-kot/content/backend/internal/types" +) + +type EntLabelRepository struct { + db *ent.Client +} + +func (r *EntLabelRepository) Get(ctx context.Context, ID uuid.UUID) (*ent.Label, error) { + return r.db.Label.Query(). + Where(label.ID(ID)). + WithGroup(). + WithItems(). + Only(ctx) +} + +func (r *EntLabelRepository) GetAll(ctx context.Context, groupId uuid.UUID) ([]*ent.Label, error) { + return r.db.Label.Query(). + Where(label.HasGroupWith(group.ID(groupId))). + WithGroup(). + All(ctx) +} + +func (r *EntLabelRepository) Create(ctx context.Context, groupdId uuid.UUID, data types.LabelCreate) (*ent.Label, error) { + label, err := r.db.Label.Create(). + SetName(data.Name). + SetDescription(data.Description). + SetColor(data.Color). + SetGroupID(groupdId). 
+ Save(ctx) + + label.Edges.Group = &ent.Group{ID: groupdId} // bootstrap group ID + return label, err +} + +func (r *EntLabelRepository) Update(ctx context.Context, data types.LabelUpdate) (*ent.Label, error) { + _, err := r.db.Label.UpdateOneID(data.ID). + SetName(data.Name). + SetDescription(data.Description). + SetColor(data.Color). + Save(ctx) + + if err != nil { + return nil, err + } + + return r.Get(ctx, data.ID) +} + +func (r *EntLabelRepository) Delete(ctx context.Context, id uuid.UUID) error { + return r.db.Label.DeleteOneID(id).Exec(ctx) +} diff --git a/backend/internal/repo/repos_all.go b/backend/internal/repo/repos_all.go index 7de4e12..9fe144e 100644 --- a/backend/internal/repo/repos_all.go +++ b/backend/internal/repo/repos_all.go @@ -8,6 +8,7 @@ type AllRepos struct { AuthTokens *EntTokenRepository Groups *EntGroupRepository Locations *EntLocationRepository + Labels *EntLabelRepository } func EntAllRepos(db *ent.Client) *AllRepos { @@ -16,5 +17,6 @@ func EntAllRepos(db *ent.Client) *AllRepos { AuthTokens: &EntTokenRepository{db}, Groups: &EntGroupRepository{db}, Locations: &EntLocationRepository{db}, + Labels: &EntLabelRepository{db}, } } diff --git a/backend/internal/services/all.go b/backend/internal/services/all.go index cd4110a..4d26ae5 100644 --- a/backend/internal/services/all.go +++ b/backend/internal/services/all.go @@ -6,6 +6,7 @@ type AllServices struct { User *UserService Admin *AdminService Location *LocationService + Labels *LabelService } func NewServices(repos *repo.AllRepos) *AllServices { @@ -13,5 +14,6 @@ func NewServices(repos *repo.AllRepos) *AllServices { User: &UserService{repos}, Admin: &AdminService{repos}, Location: &LocationService{repos}, + Labels: &LabelService{repos}, } } diff --git a/backend/internal/services/mappers/labels.go b/backend/internal/services/mappers/labels.go new file mode 100644 index 0000000..a04e862 --- /dev/null +++ b/backend/internal/services/mappers/labels.go @@ -0,0 +1,32 @@ +package mappers + +import 
( + "github.com/hay-kot/content/backend/ent" + "github.com/hay-kot/content/backend/internal/types" +) + +func ToLabelSummary(label *ent.Label) *types.LabelSummary { + return &types.LabelSummary{ + ID: label.ID, + GroupID: label.Edges.Group.ID, + Name: label.Name, + Description: label.Description, + CreatedAt: label.CreatedAt, + UpdatedAt: label.UpdatedAt, + } +} + +func ToLabelSummaryErr(label *ent.Label, err error) (*types.LabelSummary, error) { + return ToLabelSummary(label), err +} + +func ToLabelOut(label *ent.Label) *types.LabelOut { + return &types.LabelOut{ + LabelSummary: *ToLabelSummary(label), + Items: MapEach(label.Edges.Items, ToItemSummary), + } +} + +func ToLabelOutErr(label *ent.Label, err error) (*types.LabelOut, error) { + return ToLabelOut(label), err +} diff --git a/backend/internal/services/service_labels.go b/backend/internal/services/service_labels.go new file mode 100644 index 0000000..70bfa30 --- /dev/null +++ b/backend/internal/services/service_labels.go @@ -0,0 +1,63 @@ +package services + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/content/backend/internal/repo" + "github.com/hay-kot/content/backend/internal/services/mappers" + "github.com/hay-kot/content/backend/internal/types" +) + +type LabelService struct { + repos *repo.AllRepos +} + +func (svc *LabelService) Create(ctx context.Context, groupId uuid.UUID, data types.LabelCreate) (*types.LabelSummary, error) { + label, err := svc.repos.Labels.Create(ctx, groupId, data) + return mappers.ToLabelSummaryErr(label, err) +} + +func (svc *LabelService) Update(ctx context.Context, groupId uuid.UUID, data types.LabelUpdate) (*types.LabelSummary, error) { + label, err := svc.repos.Labels.Update(ctx, data) + return mappers.ToLabelSummaryErr(label, err) +} + +func (svc *LabelService) Delete(ctx context.Context, groupId uuid.UUID, id uuid.UUID) error { + label, err := svc.repos.Labels.Get(ctx, id) + if err != nil { + return err + } + if label.Edges.Group.ID != groupId 
{ + return ErrNotOwner + } + return svc.repos.Labels.Delete(ctx, id) +} + +func (svc *LabelService) Get(ctx context.Context, groupId uuid.UUID, id uuid.UUID) (*types.LabelOut, error) { + label, err := svc.repos.Labels.Get(ctx, id) + + if err != nil { + return nil, err + } + + if label.Edges.Group.ID != groupId { + return nil, ErrNotOwner + } + + return mappers.ToLabelOut(label), nil +} + +func (svc *LabelService) GetAll(ctx context.Context, groupId uuid.UUID) ([]*types.LabelSummary, error) { + labels, err := svc.repos.Labels.GetAll(ctx, groupId) + if err != nil { + return nil, err + } + + labelsOut := make([]*types.LabelSummary, len(labels)) + for i, label := range labels { + labelsOut[i] = mappers.ToLabelSummary(label) + } + + return labelsOut, nil +} diff --git a/backend/internal/types/label_types.go b/backend/internal/types/label_types.go index 029a375..301a3fd 100644 --- a/backend/internal/types/label_types.go +++ b/backend/internal/types/label_types.go @@ -1,7 +1,34 @@ package types -type LabelOut struct{} +import ( + "time" -type LabelCreate struct{} + "github.com/google/uuid" +) -type LabelSummary struct{} +type LabelCreate struct { + Name string `json:"name"` + Description string `json:"description"` + Color string `json:"color"` +} + +type LabelUpdate struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Color string `json:"color"` +} + +type LabelSummary struct { + ID uuid.UUID `json:"id"` + GroupID uuid.UUID `json:"groupId"` + Name string `json:"name"` + Description string `json:"description"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type LabelOut struct { + LabelSummary + Items []*ItemSummary `json:"items"` +} diff --git a/frontend/components/App/Header.vue b/frontend/components/App/Header.vue index b352e5b..49c67bd 100644 --- a/frontend/components/App/Header.vue +++ b/frontend/components/App/Header.vue @@ -25,79 +25,50 @@ }, ]; + const modals = 
reactive({ + location: false, + label: false, + item: false, + }); + const dropdown = [ { name: 'Location', action: () => { - modal.value = true; + modals.location = true; }, }, { name: 'Item / Asset', - action: () => {}, + action: () => { + modals.item = true; + }, }, { name: 'Label', - action: () => {}, + action: () => { + modals.label = true; + }, }, ]; - - // ---------------------------- - // Location Stuff - // Should move to own component - const locationLoading = ref(false); - const locationForm = reactive({ - name: '', - description: '', - }); - - const locationNameRef = ref(null); - const triggerFocus = ref(false); - const modal = ref(false); - - whenever( - () => modal.value, - () => { - triggerFocus.value = true; - } - ); - - async function createLocation() { - locationLoading.value = true; - const { data } = await api.locations.create(locationForm); - - if (data) { - navigateTo(`/location/${data.id}`); - } - - locationLoading.value = false; - modal.value = false; - locationForm.name = ''; - locationForm.description = ''; - triggerFocus.value = false; - }