From 29f583e9361b18ac5ae81f7c8d8107b8bb06fb25 Mon Sep 17 00:00:00 2001 From: Hayden <64056131+hay-kot@users.noreply.github.com> Date: Mon, 29 Aug 2022 18:30:36 -0800 Subject: [PATCH] Initial commit --- .github/workflows/go.yaml | 60 + .github/workflows/publish.yaml | 23 + .gitignore | 35 + README.md | 260 ++ Taskfile.yml | 59 + backend/.dockerignore | 24 + backend/Dockerfile | 25 + backend/app/api/app.go | 46 + backend/app/api/base/base_ctrl.go | 48 + backend/app/api/base/base_ctrl_test.go | 35 + backend/app/api/docs/docs.go | 558 +++ backend/app/api/docs/swagger.json | 534 +++ backend/app/api/docs/swagger.yaml | 318 ++ backend/app/api/main.go | 116 + backend/app/api/middleware.go | 117 + backend/app/api/routes.go | 82 + backend/app/api/seed.go | 98 + backend/app/api/v1/controller.go | 29 + backend/app/api/v1/controller_test.go | 20 + backend/app/api/v1/main_test.go | 51 + backend/app/api/v1/v1_ctrl_admin.go | 207 ++ backend/app/api/v1/v1_ctrl_admin_test.go | 109 + backend/app/api/v1/v1_ctrl_auth.go | 136 + backend/app/api/v1/v1_ctrl_user.go | 80 + backend/app/cli/app.go | 9 + backend/app/cli/app_users.go | 105 + backend/app/cli/main.go | 82 + backend/app/cli/reader/reader.go | 65 + backend/app/generator/main.go | 72 + backend/config.template.yml | 31 + backend/docker-compose.yml | 10 + backend/ent/authtokens.go | 165 + backend/ent/authtokens/authtokens.go | 67 + backend/ent/authtokens/where.go | 403 +++ backend/ent/authtokens_create.go | 326 ++ backend/ent/authtokens_delete.go | 111 + backend/ent/authtokens_query.go | 1000 ++++++ backend/ent/authtokens_update.go | 472 +++ backend/ent/client.go | 344 ++ backend/ent/config.go | 60 + backend/ent/context.go | 33 + backend/ent/ent.go | 261 ++ backend/ent/enttest/enttest.go | 78 + backend/ent/generate.go | 3 + backend/ent/hook/hook.go | 217 ++ backend/ent/migrate/migrate.go | 71 + backend/ent/migrate/schema.go | 63 + backend/ent/mutation.go | 1091 ++++++ backend/ent/predicate/predicate.go | 13 + backend/ent/runtime.go | 
50 + backend/ent/runtime/runtime.go | 10 + backend/ent/schema/authtokens.go | 43 + backend/ent/schema/user.go | 38 + backend/ent/tx.go | 213 ++ backend/ent/user.go | 157 + backend/ent/user/user.go | 65 + backend/ent/user/where.go | 528 +++ backend/ent/user_create.go | 363 ++ backend/ent/user_delete.go | 111 + backend/ent/user_query.go | 993 ++++++ backend/ent/user_update.go | 592 ++++ backend/go.mod | 49 + backend/go.sum | 154 + backend/internal/config/conf.go | 81 + backend/internal/config/conf_database.go | 27 + backend/internal/config/conf_database_test.go | 36 + backend/internal/config/conf_logger.go | 6 + backend/internal/config/conf_mailer.go | 15 + backend/internal/config/conf_mailer_test.go | 40 + backend/internal/config/conf_seed.go | 13 + backend/internal/mapper/users_automapper.go | 27 + backend/internal/mocks/chimocker/chimocker.go | 30 + backend/internal/mocks/factories/users.go | 16 + backend/internal/mocks/mock_logger.go | 11 + backend/internal/mocks/mocker_services.go | 10 + backend/internal/mocks/mocks_ent_repo.go | 22 + backend/internal/repo/main_test.go | 38 + backend/internal/repo/repos_all.go | 16 + backend/internal/repo/token_ent.go | 74 + backend/internal/repo/token_ent_test.go | 110 + backend/internal/repo/token_interface.go | 20 + backend/internal/repo/users_ent.go | 141 + backend/internal/repo/users_ent_test.go | 148 + backend/internal/repo/users_interface.go | 27 + backend/internal/services/all.go | 15 + backend/internal/services/contexts.go | 40 + backend/internal/services/contexts_test.go | 39 + backend/internal/services/service_admin.go | 47 + backend/internal/services/service_user.go | 84 + backend/internal/types/about_types.go | 11 + backend/internal/types/token_types.go | 39 + backend/internal/types/users_types.go | 58 + backend/internal/types/users_types_test.go | 76 + backend/pkgs/automapper/README.md | 56 + backend/pkgs/automapper/automapper.go | 92 + backend/pkgs/automapper/conf.go | 11 + backend/pkgs/automapper/main.go | 48 + 
backend/pkgs/automapper/templates.go | 22 + backend/pkgs/faker/random.go | 37 + backend/pkgs/faker/randoms_test.go | 95 + backend/pkgs/hasher/password.go | 13 + backend/pkgs/hasher/password_test.go | 40 + backend/pkgs/hasher/token.go | 30 + backend/pkgs/hasher/token_test.go | 44 + backend/pkgs/logger/struct_logger.go | 121 + backend/pkgs/logger/struct_logger_test.go | 119 + backend/pkgs/mailer/mailer.go | 51 + backend/pkgs/mailer/mailer_test.go | 66 + backend/pkgs/mailer/message.go | 56 + backend/pkgs/mailer/message_test.go | 26 + backend/pkgs/mailer/templates.go | 62 + backend/pkgs/mailer/templates/welcome.html | 444 +++ backend/pkgs/mailer/test-mailer-template.json | 7 + backend/pkgs/server/constants.go | 7 + backend/pkgs/server/request.go | 48 + backend/pkgs/server/request_test.go | 210 ++ backend/pkgs/server/response.go | 61 + backend/pkgs/server/response_error_builder.go | 51 + .../server/response_error_builder_test.go | 107 + backend/pkgs/server/response_test.go | 78 + backend/pkgs/server/result.go | 27 + backend/pkgs/server/server.go | 123 + backend/pkgs/server/server_test.go | 97 + backend/pkgs/server/worker.go | 20 + backend/static/favicon.ico | Bin 0 -> 1366 bytes client/client/index.ts | 5 + client/client/v1client.ts | 93 + client/package-lock.json | 3024 +++++++++++++++++ client/package.json | 17 + client/test/base/base.test.ts | 26 + client/test/config.ts | 4 + client/test/setup.ts | 20 + client/test/v1/login.test.ts | 75 + client/test/vitest.config.ts | 8 + client/tsconfig.json | 17 + 135 files changed, 18463 insertions(+) create mode 100644 .github/workflows/go.yaml create mode 100644 .github/workflows/publish.yaml create mode 100644 .gitignore create mode 100644 README.md create mode 100644 Taskfile.yml create mode 100644 backend/.dockerignore create mode 100644 backend/Dockerfile create mode 100644 backend/app/api/app.go create mode 100644 backend/app/api/base/base_ctrl.go create mode 100644 backend/app/api/base/base_ctrl_test.go create mode 100644 
backend/app/api/docs/docs.go create mode 100644 backend/app/api/docs/swagger.json create mode 100644 backend/app/api/docs/swagger.yaml create mode 100644 backend/app/api/main.go create mode 100644 backend/app/api/middleware.go create mode 100644 backend/app/api/routes.go create mode 100644 backend/app/api/seed.go create mode 100644 backend/app/api/v1/controller.go create mode 100644 backend/app/api/v1/controller_test.go create mode 100644 backend/app/api/v1/main_test.go create mode 100644 backend/app/api/v1/v1_ctrl_admin.go create mode 100644 backend/app/api/v1/v1_ctrl_admin_test.go create mode 100644 backend/app/api/v1/v1_ctrl_auth.go create mode 100644 backend/app/api/v1/v1_ctrl_user.go create mode 100644 backend/app/cli/app.go create mode 100644 backend/app/cli/app_users.go create mode 100644 backend/app/cli/main.go create mode 100644 backend/app/cli/reader/reader.go create mode 100644 backend/app/generator/main.go create mode 100644 backend/config.template.yml create mode 100644 backend/docker-compose.yml create mode 100644 backend/ent/authtokens.go create mode 100644 backend/ent/authtokens/authtokens.go create mode 100644 backend/ent/authtokens/where.go create mode 100644 backend/ent/authtokens_create.go create mode 100644 backend/ent/authtokens_delete.go create mode 100644 backend/ent/authtokens_query.go create mode 100644 backend/ent/authtokens_update.go create mode 100644 backend/ent/client.go create mode 100644 backend/ent/config.go create mode 100644 backend/ent/context.go create mode 100644 backend/ent/ent.go create mode 100644 backend/ent/enttest/enttest.go create mode 100644 backend/ent/generate.go create mode 100644 backend/ent/hook/hook.go create mode 100644 backend/ent/migrate/migrate.go create mode 100644 backend/ent/migrate/schema.go create mode 100644 backend/ent/mutation.go create mode 100644 backend/ent/predicate/predicate.go create mode 100644 backend/ent/runtime.go create mode 100644 backend/ent/runtime/runtime.go create mode 100644 
backend/ent/schema/authtokens.go create mode 100644 backend/ent/schema/user.go create mode 100644 backend/ent/tx.go create mode 100644 backend/ent/user.go create mode 100644 backend/ent/user/user.go create mode 100644 backend/ent/user/where.go create mode 100644 backend/ent/user_create.go create mode 100644 backend/ent/user_delete.go create mode 100644 backend/ent/user_query.go create mode 100644 backend/ent/user_update.go create mode 100644 backend/go.mod create mode 100644 backend/go.sum create mode 100644 backend/internal/config/conf.go create mode 100644 backend/internal/config/conf_database.go create mode 100644 backend/internal/config/conf_database_test.go create mode 100644 backend/internal/config/conf_logger.go create mode 100644 backend/internal/config/conf_mailer.go create mode 100644 backend/internal/config/conf_mailer_test.go create mode 100644 backend/internal/config/conf_seed.go create mode 100644 backend/internal/mapper/users_automapper.go create mode 100644 backend/internal/mocks/chimocker/chimocker.go create mode 100644 backend/internal/mocks/factories/users.go create mode 100644 backend/internal/mocks/mock_logger.go create mode 100644 backend/internal/mocks/mocker_services.go create mode 100644 backend/internal/mocks/mocks_ent_repo.go create mode 100644 backend/internal/repo/main_test.go create mode 100644 backend/internal/repo/repos_all.go create mode 100644 backend/internal/repo/token_ent.go create mode 100644 backend/internal/repo/token_ent_test.go create mode 100644 backend/internal/repo/token_interface.go create mode 100644 backend/internal/repo/users_ent.go create mode 100644 backend/internal/repo/users_ent_test.go create mode 100644 backend/internal/repo/users_interface.go create mode 100644 backend/internal/services/all.go create mode 100644 backend/internal/services/contexts.go create mode 100644 backend/internal/services/contexts_test.go create mode 100644 backend/internal/services/service_admin.go create mode 100644 
backend/internal/services/service_user.go create mode 100644 backend/internal/types/about_types.go create mode 100644 backend/internal/types/token_types.go create mode 100644 backend/internal/types/users_types.go create mode 100644 backend/internal/types/users_types_test.go create mode 100644 backend/pkgs/automapper/README.md create mode 100644 backend/pkgs/automapper/automapper.go create mode 100644 backend/pkgs/automapper/conf.go create mode 100644 backend/pkgs/automapper/main.go create mode 100644 backend/pkgs/automapper/templates.go create mode 100644 backend/pkgs/faker/random.go create mode 100644 backend/pkgs/faker/randoms_test.go create mode 100644 backend/pkgs/hasher/password.go create mode 100644 backend/pkgs/hasher/password_test.go create mode 100644 backend/pkgs/hasher/token.go create mode 100644 backend/pkgs/hasher/token_test.go create mode 100644 backend/pkgs/logger/struct_logger.go create mode 100644 backend/pkgs/logger/struct_logger_test.go create mode 100644 backend/pkgs/mailer/mailer.go create mode 100644 backend/pkgs/mailer/mailer_test.go create mode 100644 backend/pkgs/mailer/message.go create mode 100644 backend/pkgs/mailer/message_test.go create mode 100644 backend/pkgs/mailer/templates.go create mode 100644 backend/pkgs/mailer/templates/welcome.html create mode 100644 backend/pkgs/mailer/test-mailer-template.json create mode 100644 backend/pkgs/server/constants.go create mode 100644 backend/pkgs/server/request.go create mode 100644 backend/pkgs/server/request_test.go create mode 100644 backend/pkgs/server/response.go create mode 100644 backend/pkgs/server/response_error_builder.go create mode 100644 backend/pkgs/server/response_error_builder_test.go create mode 100644 backend/pkgs/server/response_test.go create mode 100644 backend/pkgs/server/result.go create mode 100644 backend/pkgs/server/server.go create mode 100644 backend/pkgs/server/server_test.go create mode 100644 backend/pkgs/server/worker.go create mode 100644 
backend/static/favicon.ico create mode 100644 client/client/index.ts create mode 100644 client/client/v1client.ts create mode 100644 client/package-lock.json create mode 100644 client/package.json create mode 100644 client/test/base/base.test.ts create mode 100644 client/test/config.ts create mode 100644 client/test/setup.ts create mode 100644 client/test/v1/login.test.ts create mode 100644 client/test/vitest.config.ts create mode 100644 client/tsconfig.json diff --git a/.github/workflows/go.yaml b/.github/workflows/go.yaml new file mode 100644 index 0000000..5674ede --- /dev/null +++ b/.github/workflows/go.yaml @@ -0,0 +1,60 @@ +name: Go Build/Test + +on: + push: + branches: [main] + paths: + - "**.go" + - "client/**/*.ts" + pull_request: + branches: [main] + paths: + - "**.go" + - "client/**/*.ts" + +jobs: + go-test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.18 + + - name: Install Task + uses: arduino/setup-task@v1 + + - name: Build API + run: task api:build + + - name: Build CLI + run: task cli:build + - name: Test + run: task api:coverage + + - name: Upload coverage to Codecov + run: cd backend && bash <(curl -s https://codecov.io/bash) + end-to-end: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Install Task + uses: arduino/setup-task@v1 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.18 + + - name: Build API + run: task api:build + + - name: Setup Node + working-directory: ./client + run: npm install + + - name: Test + run: task client:test diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 0000000..d1f25e9 --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,23 @@ +name: Build Docker Image + +on: + push: + branches: [main] + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Go + uses: 
actions/setup-go@v2 + with: + go-version: 1.18 + - name: login to container registry + run: docker login ghcr.io --username hay-kot --password $CR_PAT + env: + CR_PAT: ${{ secrets.CR_PAT }} + - name: Build Docker Image + run: cd backend && docker build -t ghcr.io/hay-kot/go-web-template:latest . + - name: push to container registry + run: docker push ghcr.io/hay-kot/go-web-template:latest diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5f78192 --- /dev/null +++ b/.gitignore @@ -0,0 +1,35 @@ +# Project Specific +api.log +config.yml +ent.db +.idea +.vscode + +.DS_Store +test-mailer.json +node_modules + + +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +.task/ +backend/.env diff --git a/README.md b/README.md new file mode 100644 index 0000000..9042c20 --- /dev/null +++ b/README.md @@ -0,0 +1,260 @@ +

Go Web Template

+

+ + + + + + +

+ +This Go Web Template is a simple starter template for a Go web application. It includes a web server API, as well as a starter CLI to manage the web server/database inside the container. It should be noted that while while use of the standard library is a high priority, this template does make use of multiple external packages. It does however abide by the standard http handler pattern. + +- [Template Features](#template-features) + - [General](#general) + - [Mailer](#mailer) + - [Admin / Superuser Management](#admin--superuser-management) + - [Admin](#admin) + - [Self Service](#self-service) + - [Logging](#logging) + - [App Router](#app-router) + - [Web Server](#web-server) + - [Database](#database) + - [Application Configuration](#application-configuration) +- [Management CLI](#management-cli) + - [Docker Setup](#docker-setup) +- [Makefile](#makefile) +- [How To Use: Application API](#how-to-use-application-api) + - [Package Structure (Backend)](#package-structure-backend) + - [app](#app) + - [internal](#internal) + - [pkgs](#pkgs) + - [ent](#ent) + - [Configuring The API](#configuring-the-api) +- [How To Use: Application CLI](#how-to-use-application-cli) + - [Manage Users](#manage-users) + - [List Users](#list-users) + - [Create User](#create-user) + - [Delete User](#delete-user) + +## Template Features + +### General + +- [ ] Test Coverage (WIP) +- [ ] End to End Testing Framework + - [x] Build with TS for ready to go frontend client + - [x] Github CI for end to end testing + - [ ] Basic route tests for end to end testing + - [x] User Auth + - [ ] Admin User Services + - [x] Base API Route +- [x] Basic Backend CI/CD Workflow + - [x] Lint + - [x] Test w/ Coverage + - [x] Build CLI and API +- [ ] Frontend Client + - [ ] Autogenerated types + - [ ] All API Routes (w/ Auth) + +### Mailer + +- [ ] Mailer builder for easy email sending +- [x] Starter email templates + - [x] Activate Account + - [ ] Password Reset +- [ ] Bulk Messages + +### Admin / Superuser 
Management + +#### Admin + +- [ ] CRUD Operations for Users + +#### Self Service + +- [ ] User sign-up +- [ ] Require Activation by Email +- [ ] Stateful Token Auth +- [ ] Login/Logout +- [ ] Password Reset by Email + +### Logging + +- [x] Logging +- [x] File Logging + STDOUT +- [x] Request Logging (sugar in development structured in prod) +- [x] Dependency Free +- [x] Basic Structured Logging + +### App Router + +- [x] Built on Chi Router +- [x] Basic Middleware Stack + - [x] Logging/Structured Logging + - [x] RealIP + - [x] RequestID + - [x] Strip Trailing Slash + - [x] Panic Recovery + - [x] Timeout + - [x] User Auth + - [ ] Admin Auth +- [x] Auto log registered routes for easy debugging + +### Web Server + +- [x] Router agnostic +- [x] Background Tasks +- [ ] Limited Worker Pool +- [x] Graceful shutdown + - [x] Finish HTTP requests with timeout + - [x] Finish background tasks (no timeout) +- [x] Response Helpers + - [x] Error response builder + - [x] Utility responses + - [x] Wrapper class for uniform responses + +### Database + +- [x] [Ent for Database](https://entgo.io/) + +### Application Configuration + +- [x] Yaml/CLI/ENV Configuration + +
+ CLI Args + +``` +Usage: api [options] [arguments] + +OPTIONS + --mode/$API_MODE (default: development) + --web-port/$API_WEB_PORT (default: 3000) + --web-host/$API_WEB_HOST (default: 127.0.0.1) + --database-driver/$API_DATABASE_DRIVER (default: sqlite3) + --database-sqlite-url/$API_DATABASE_SQLITE_URL (default: file:ent?mode=memory&cache=shared&_fk=1) + --database-postgres-url/$API_DATABASE_POSTGRES_URL + --log-level/$API_LOG_LEVEL (default: debug) + --log-file/$API_LOG_FILE + --mailer-host/$API_MAILER_HOST + --mailer-port/$API_MAILER_PORT + --mailer-username/$API_MAILER_USERNAME + --mailer-password/$API_MAILER_PASSWORD + --mailer-from/$API_MAILER_FROM + --seed-enabled/$API_SEED_ENABLED (default: false) + --seed-users/$API_SEED_USERS ,[value...] + --help/-h + display this help message +``` + +
+ +
+ YAML Config + +```yaml +# config.yml +--- +mode: development +web: + port: 3915 + host: 127.0.0.1 +database: + driver: sqlite3 + sqlite-url: ./ent.db?_fk=1 +logger: + level: debug + file: api.log +mailer: + host: smtp.example.com + port: 465 + username: + password: + from: example@email.com +``` + +
+ +## Management CLI + +- [ ] CLI Interface (Partial) + +### Docker Setup + +- [x] Build and Run API +- [x] Build and Setup CLI in path + +## Makefile + +- **Build and Run API:** `make api` +- **Build Production Image** `make prod` +- **Build CLI** `make cli` +- **Test** `make test` +- **Coverage** `make coverage` + +## How To Use: Application API + +### Package Structure (Backend) + +#### app + +The App folder contains the main modules packages/applications that utilize the other packages. These are the applications that are compiled and shipped with the docker-image. + +#### internal + +Internal packages are used to provide the core functionality of the application that need to be shared across Applications _but_ are still tightly coupled to other packages or applications. These can often be bridges from the pkgs folder to the app folder to provide a common interface. + +#### pkgs + +The packages directory contains packages that are considered drop-in and are not tightly coupled to the application. These packages should provide a simple and easily describable feature. For example. The `hasher` package provides a Password Hashing function and checker and can easily be used in this application or any other. + +A good rule to follow is, if you can copy the code from one package to a completely. different project with no-modifications, it belongs here. + +#### ent + +As an exception to the above, this project adhears to the convention set by `Ent` we use a `ent` folder to contain the database schema. If you'd like to replace the Ent package with an alternative, you can review the repository layer in the `internal` folder. + +[Checkout the Entgo.io Getting Started Page](https://entgo.io/docs/getting-started) + +### Configuring The API + +See the [Application Configuration](#application-configuration) section for more information. 
+ +## How To Use: Application CLI + +### Manage Users + +#### List Users + +```bash +go run ./app/cli/*.go users list +``` + +#### Create User + +**Development** + +```bash +go run ./app/cli/*.go users add --name=hay-kot --password=password --email=hay-kot@pm.me --is-super +``` + +**Docker** + +```bash +manage users add --name=hay-kot --password=password --email=hay-kot@pm.me +``` + +#### Delete User + +**Development** + +```bash +go run ./app/cli/*.go users delete --id=2 +``` + +**Docker** + +```bash +manage users delete --id=2 +``` diff --git a/Taskfile.yml b/Taskfile.yml new file mode 100644 index 0000000..ce64fff --- /dev/null +++ b/Taskfile.yml @@ -0,0 +1,59 @@ +version: "3" + +tasks: + cli: + cmds: + - cd backend && go run ./app/cli/ {{.CLI_ARGS}} + silent: false + + cli:build: + cmds: + - cd backend && go build ./app/cli/ + silent: false + + api: + cmds: + - cd backend/app/api/ && swag fmt + - cd backend/app/api/ && swag init --dir=./,../../internal,../../pkgs + # - | + # npx swagger-typescript-api \ + # --path ./backend/app/api/docs/swagger.json \ + # --output ./client/auto-client \ + # --module-name-first-tag \ + # --modular + - cd backend && go run ./app/api/ {{.CLI_ARGS}} + silent: false + sources: + - ./backend/**/*.go + + api:build: + cmds: + - cd backend && go build ./app/api/ + silent: true + + api:test: + cmds: + - cd backend && go test ./app/api/ + silent: true + + api:coverage: + cmds: + - cd backend && go test -race -coverprofile=coverage.out -covermode=atomic ./app/... ./internal/... ./pkgs/... 
-v -cover + silent: true + + client:test: + cmds: + - cd backend && go run ./app/api/ & + - sleep 5 + - cd client && npm run test:ci + silent: true + + docker:build: + cmds: + - cd backend && docker-compose up --build + silent: true + + generate:types: + cmds: + - cd backend && go run ./app/generator + silent: true diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000..720e7a0 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,24 @@ +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/bin +**/charts +**/docker-compose* +**/compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +README.md diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..3602d98 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,25 @@ +# Build API +FROM golang:alpine AS builder +RUN apk add --no-cache git build-base +WORKDIR /go/src/app +COPY . . +RUN go get -d -v ./... 
+RUN go build -o /go/bin/api -v ./app/api/*.go +RUN go build -o /go/bin/manage -v ./app/cli/*.go + + +# Production Stage +FROM alpine:latest + +RUN apk --no-cache add ca-certificates +COPY ./config.template.yml /app/config.yml +COPY --from=builder /go/bin/api /app +COPY --from=builder /go/bin/manage /bin + +RUN chmod +x /app/api +RUN chmod +x /bin/manage + +LABEL Name=gowebtemplate Version=0.0.1 +EXPOSE 7745 +WORKDIR /app +CMD [ "./api" ] diff --git a/backend/app/api/app.go b/backend/app/api/app.go new file mode 100644 index 0000000..5a062b0 --- /dev/null +++ b/backend/app/api/app.go @@ -0,0 +1,46 @@ +package main + +import ( + "time" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/mailer" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +type app struct { + conf *config.Config + logger *logger.Logger + mailer mailer.Mailer + db *ent.Client + server *server.Server + repos *repo.AllRepos + services *services.AllServices +} + +func NewApp(conf *config.Config) *app { + s := &app{ + conf: conf, + } + + s.mailer = mailer.Mailer{ + Host: s.conf.Mailer.Host, + Port: s.conf.Mailer.Port, + Username: s.conf.Mailer.Username, + Password: s.conf.Mailer.Password, + From: s.conf.Mailer.From, + } + + return s +} + +func (a *app) StartReoccurringTasks(t time.Duration, fn func()) { + for { + a.server.Background(fn) + time.Sleep(t) + } +} diff --git a/backend/app/api/base/base_ctrl.go b/backend/app/api/base/base_ctrl.go new file mode 100644 index 0000000..7649b8e --- /dev/null +++ b/backend/app/api/base/base_ctrl.go @@ -0,0 +1,48 @@ +package base + +import ( + "net/http" + + "github.com/hay-kot/git-web-template/backend/internal/types" + 
"github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +type ReadyFunc func() bool + +type BaseController struct { + log *logger.Logger + svr *server.Server +} + +func NewBaseController(log *logger.Logger, svr *server.Server) *BaseController { + h := &BaseController{ + log: log, + svr: svr, + } + return h +} + +// HandleBase godoc +// @Summary Retrieves the basic information about the API +// @Tags Base +// @Produce json +// @Success 200 {object} server.Result{item=types.ApiSummary} +// @Router /status [GET] +func (ctrl *BaseController) HandleBase(ready ReadyFunc, versions ...string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + data := types.ApiSummary{ + Healthy: ready(), + Versions: versions, + Title: "Go API Template", + Message: "Welcome to the Go API Template Application!", + } + + err := server.Respond(w, http.StatusOK, server.Wrap(data)) + + if err != nil { + ctrl.log.Error(err, nil) + server.RespondInternalServerError(w) + } + } +} diff --git a/backend/app/api/base/base_ctrl_test.go b/backend/app/api/base/base_ctrl_test.go new file mode 100644 index 0000000..972f55e --- /dev/null +++ b/backend/app/api/base/base_ctrl_test.go @@ -0,0 +1,35 @@ +package base + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/mocks" +) + +func GetTestHandler(t *testing.T) *BaseController { + return NewBaseController(mocks.GetStructLogger(), nil) +} + +func TestHandlersv1_HandleBase(t *testing.T) { + // Setup + hdlrFunc := GetTestHandler(t).HandleBase(func() bool { return true }, "v1") + + // Call Handler Func + rr := httptest.NewRecorder() + hdlrFunc(rr, nil) + + // Validate Status Code + if rr.Code != http.StatusOK { + t.Errorf("Expected status code to be %d, got %d", http.StatusOK, rr.Code) + } + + // Validate Json Payload + expected := `{"item":{"health":true,"versions":["v1"],"title":"Go API 
Template","message":"Welcome to the Go API Template Application!"}}` + + if rr.Body.String() != expected { + t.Errorf("Expected json to be %s, got %s", expected, rr.Body.String()) + } + +} diff --git a/backend/app/api/docs/docs.go b/backend/app/api/docs/docs.go new file mode 100644 index 0000000..9a371a6 --- /dev/null +++ b/backend/app/api/docs/docs.go @@ -0,0 +1,558 @@ +// Package docs GENERATED BY SWAG; DO NOT EDIT +// This file was generated by swaggo/swag +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "contact": { + "name": "Don't" + }, + "license": { + "name": "MIT" + }, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/status": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Base" + ], + "summary": "Retrieves the basic information about the API", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.ApiSummary" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Gets all users from the database", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "type": "array", + "items": { + "$ref": "#/definitions/types.UserOut" + } + } + } + } + ] + } + } + } + }, + "post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Create a new user", + "parameters": [ + { + "description": "User Data", + "name": "payload", 
+ "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserCreate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users/{id}": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Get a user from the database", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Update a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "delete": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Delete a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + } 
+ } + } + }, + "/v1/users/login": { + "post": { + "consumes": [ + "application/x-www-form-urlencoded", + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Login", + "parameters": [ + { + "type": "string", + "example": "admin@admin.com", + "description": "string", + "name": "username", + "in": "formData" + }, + { + "type": "string", + "example": "admin", + "description": "string", + "name": "password", + "in": "formData" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.TokenResponse" + } + } + } + } + }, + "/v1/users/logout": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "tags": [ + "Authentication" + ], + "summary": "User Logout", + "responses": { + "204": { + "description": "" + } + } + } + }, + "/v1/users/refresh": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "handleAuthRefresh returns a handler that will issue a new token from an existing token.\nThis does not validate that the user still exists within the database.", + "tags": [ + "Authentication" + ], + "summary": "User Token Refresh", + "responses": { + "200": { + "description": "" + } + } + } + }, + "/v1/users/self": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Get the current user", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": 
"#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserUpdate" + } + } + } + ] + } + } + } + } + }, + "/v1/users/self/password": { + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user's password // TODO:", + "responses": { + "204": { + "description": "" + } + } + } + } + }, + "definitions": { + "server.Result": { + "type": "object", + "properties": { + "details": {}, + "error": { + "type": "boolean" + }, + "item": {}, + "message": { + "type": "string" + } + } + }, + "types.ApiSummary": { + "type": "object", + "properties": { + "health": { + "type": "boolean" + }, + "message": { + "type": "string" + }, + "title": { + "type": "string" + }, + "versions": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "types.TokenResponse": { + "type": "object", + "properties": { + "expiresAt": { + "type": "string" + }, + "token": { + "type": "string" + } + } + }, + "types.UserCreate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "types.UserOut": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + } + } + }, + "types.UserUpdate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "name": { + "type": "string" + } + } + } + }, + "securityDefinitions": { + "Bearer": { + "description": "\"Type 'Bearer TOKEN' to correctly set the API Key\"", + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +}` + +// SwaggerInfo 
holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "1.0", + Host: "", + BasePath: "/api", + Schemes: []string{}, + Title: "Go API Templates", + Description: "This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/backend/app/api/docs/swagger.json b/backend/app/api/docs/swagger.json new file mode 100644 index 0000000..51932ec --- /dev/null +++ b/backend/app/api/docs/swagger.json @@ -0,0 +1,534 @@ +{ + "swagger": "2.0", + "info": { + "description": "This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.", + "title": "Go API Templates", + "contact": { + "name": "Don't" + }, + "license": { + "name": "MIT" + }, + "version": "1.0" + }, + "basePath": "/api", + "paths": { + "/status": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Base" + ], + "summary": "Retrieves the basic information about the API", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.ApiSummary" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Gets all users from the database", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "type": "array", + "items": { + "$ref": "#/definitions/types.UserOut" + } + } + } + } + ] + } + } + } + }, + 
"post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Create a new user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserCreate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + } + }, + "/v1/admin/users/{id}": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Get a user from the database", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Admin: Users" + ], + "summary": "Update a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "delete": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + 
"tags": [ + "Admin: Users" + ], + "summary": "Delete a User", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + } + } + } + }, + "/v1/users/login": { + "post": { + "consumes": [ + "application/x-www-form-urlencoded", + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Login", + "parameters": [ + { + "type": "string", + "example": "admin@admin.com", + "description": "string", + "name": "username", + "in": "formData" + }, + { + "type": "string", + "example": "admin", + "description": "string", + "name": "password", + "in": "formData" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.TokenResponse" + } + } + } + } + }, + "/v1/users/logout": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "tags": [ + "Authentication" + ], + "summary": "User Logout", + "responses": { + "204": { + "description": "" + } + } + } + }, + "/v1/users/refresh": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "description": "handleAuthRefresh returns a handler that will issue a new token from an existing token.\nThis does not validate that the user still exists within the database.", + "tags": [ + "Authentication" + ], + "summary": "User Token Refresh", + "responses": { + "200": { + "description": "" + } + } + } + }, + "/v1/users/self": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Get the current user", + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserOut" + } + } + } + ] + } + } + } + }, + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + 
"application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user", + "parameters": [ + { + "description": "User Data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/types.UserUpdate" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "allOf": [ + { + "$ref": "#/definitions/server.Result" + }, + { + "type": "object", + "properties": { + "item": { + "$ref": "#/definitions/types.UserUpdate" + } + } + } + ] + } + } + } + } + }, + "/v1/users/self/password": { + "put": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "User" + ], + "summary": "Update the current user's password // TODO:", + "responses": { + "204": { + "description": "" + } + } + } + } + }, + "definitions": { + "server.Result": { + "type": "object", + "properties": { + "details": {}, + "error": { + "type": "boolean" + }, + "item": {}, + "message": { + "type": "string" + } + } + }, + "types.ApiSummary": { + "type": "object", + "properties": { + "health": { + "type": "boolean" + }, + "message": { + "type": "string" + }, + "title": { + "type": "string" + }, + "versions": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "types.TokenResponse": { + "type": "object", + "properties": { + "expiresAt": { + "type": "string" + }, + "token": { + "type": "string" + } + } + }, + "types.UserCreate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "types.UserOut": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isSuperuser": { + "type": "boolean" + }, + "name": { + "type": "string" + } + } + }, + "types.UserUpdate": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "name": { + "type": "string" + } + 
} + } + }, + "securityDefinitions": { + "Bearer": { + "description": "\"Type 'Bearer TOKEN' to correctly set the API Key\"", + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +} \ No newline at end of file diff --git a/backend/app/api/docs/swagger.yaml b/backend/app/api/docs/swagger.yaml new file mode 100644 index 0000000..b268aa0 --- /dev/null +++ b/backend/app/api/docs/swagger.yaml @@ -0,0 +1,318 @@ +basePath: /api +definitions: + server.Result: + properties: + details: {} + error: + type: boolean + item: {} + message: + type: string + type: object + types.ApiSummary: + properties: + health: + type: boolean + message: + type: string + title: + type: string + versions: + items: + type: string + type: array + type: object + types.TokenResponse: + properties: + expiresAt: + type: string + token: + type: string + type: object + types.UserCreate: + properties: + email: + type: string + isSuperuser: + type: boolean + name: + type: string + password: + type: string + type: object + types.UserOut: + properties: + email: + type: string + id: + type: string + isSuperuser: + type: boolean + name: + type: string + type: object + types.UserUpdate: + properties: + email: + type: string + name: + type: string + type: object +info: + contact: + name: Don't + description: This is a simple Rest API Server Template that implements some basic + User and Authentication patterns to help you get started and bootstrap your next + project!. 
+ license: + name: MIT + title: Go API Templates + version: "1.0" +paths: + /status: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.ApiSummary' + type: object + summary: Retrieves the basic information about the API + tags: + - Base + /v1/admin/users: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + items: + $ref: '#/definitions/types.UserOut' + type: array + type: object + security: + - Bearer: [] + summary: Gets all users from the database + tags: + - 'Admin: Users' + post: + parameters: + - description: User Data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/types.UserCreate' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Create a new user + tags: + - 'Admin: Users' + /v1/admin/users/{id}: + delete: + parameters: + - description: User ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "204": + description: "" + security: + - Bearer: [] + summary: Delete a User + tags: + - 'Admin: Users' + get: + parameters: + - description: User ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Get a user from the database + tags: + - 'Admin: Users' + put: + parameters: + - description: User ID + in: path + name: id + required: true + type: string + - description: User Data + in: body + name: 
payload + required: true + schema: + $ref: '#/definitions/types.UserUpdate' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Update a User + tags: + - 'Admin: Users' + /v1/users/login: + post: + consumes: + - application/x-www-form-urlencoded + - application/json + parameters: + - description: string + example: admin@admin.com + in: formData + name: username + type: string + - description: string + example: admin + in: formData + name: password + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/types.TokenResponse' + summary: User Login + tags: + - Authentication + /v1/users/logout: + post: + responses: + "204": + description: "" + security: + - Bearer: [] + summary: User Logout + tags: + - Authentication + /v1/users/refresh: + get: + description: |- + handleAuthRefresh returns a handler that will issue a new token from an existing token. + This does not validate that the user still exists within the database. 
+ responses: + "200": + description: "" + security: + - Bearer: [] + summary: User Token Refresh + tags: + - Authentication + /v1/users/self: + get: + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserOut' + type: object + security: + - Bearer: [] + summary: Get the current user + tags: + - User + put: + parameters: + - description: User Data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/types.UserUpdate' + produces: + - application/json + responses: + "200": + description: OK + schema: + allOf: + - $ref: '#/definitions/server.Result' + - properties: + item: + $ref: '#/definitions/types.UserUpdate' + type: object + security: + - Bearer: [] + summary: Update the current user + tags: + - User + /v1/users/self/password: + put: + produces: + - application/json + responses: + "204": + description: "" + security: + - Bearer: [] + summary: 'Update the current user''s password // TODO:' + tags: + - User +securityDefinitions: + Bearer: + description: '"Type ''Bearer TOKEN'' to correctly set the API Key"' + in: header + name: Authorization + type: apiKey +swagger: "2.0" diff --git a/backend/app/api/main.go b/backend/app/api/main.go new file mode 100644 index 0000000..12570bd --- /dev/null +++ b/backend/app/api/main.go @@ -0,0 +1,116 @@ +package main + +import ( + "context" + "io" + "log" + "os" + "time" + + "github.com/hay-kot/git-web-template/backend/app/api/docs" + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" + _ "github.com/mattn/go-sqlite3" +) + +// @title Go API Templates +// @version 1.0 +// 
@description This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!. +// @contact.name Don't +// @license.name MIT +// @BasePath /api +// @securityDefinitions.apikey Bearer +// @in header +// @name Authorization +// @description "Type 'Bearer TOKEN' to correctly set the API Key" +func main() { + cfgFile := "config.yml" + + cfg, err := config.NewConfig(cfgFile) + if err != nil { + panic(err) + } + + docs.SwaggerInfo.Host = cfg.Swagger.Host + + if err := run(cfg); err != nil { + panic(err) + } +} + +func run(cfg *config.Config) error { + app := NewApp(cfg) + + // ========================================================================= + // Setup Logger + + var wrt io.Writer + wrt = os.Stdout + if app.conf.Log.File != "" { + f, err := os.OpenFile(app.conf.Log.File, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666) + if err != nil { + log.Fatalf("error opening file: %v", err) + } + defer func(f *os.File) { + _ = f.Close() + }(f) + wrt = io.MultiWriter(wrt, f) + } + + app.logger = logger.New(wrt, logger.LevelDebug) + + // ========================================================================= + // Initialize Database & Repos + + c, err := ent.Open(cfg.Database.GetDriver(), cfg.Database.GetUrl()) + if err != nil { + app.logger.Fatal(err, logger.Props{ + "details": "failed to connect to database", + "database": cfg.Database.GetDriver(), + "url": cfg.Database.GetUrl(), + }) + } + defer func(c *ent.Client) { + _ = c.Close() + }(c) + if err := c.Schema.Create(context.Background()); err != nil { + app.logger.Fatal(err, logger.Props{ + "details": "failed to create schema", + }) + } + + app.db = c + app.repos = repo.EntAllRepos(c) + app.services = services.NewServices(app.repos) + + // ========================================================================= + // Start Server + + app.conf.Print() + + app.server = server.NewServer(app.conf.Web.Host, app.conf.Web.Port) + + routes := 
app.newRouter(app.repos) + app.LogRoutes(routes) + + app.EnsureAdministrator() + app.SeedDatabase(app.repos) + + app.logger.Info("Starting HTTP Server", logger.Props{ + "host": app.server.Host, + "port": app.server.Port, + }) + + // ========================================================================= + // Start Reoccurring Tasks + + go app.StartReoccurringTasks(time.Duration(24)*time.Hour, func() { + app.repos.AuthTokens.PurgeExpiredTokens(context.Background()) + }) + + return app.server.Start(routes) +} diff --git a/backend/app/api/middleware.go b/backend/app/api/middleware.go new file mode 100644 index 0000000..1a0d813 --- /dev/null +++ b/backend/app/api/middleware.go @@ -0,0 +1,117 @@ +package main + +import ( + "fmt" + "net/http" + "strings" + "time" + + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +func (a *app) setGlobalMiddleware(r *chi.Mux) { + // ========================================================================= + // Middleware + r.Use(middleware.RequestID) + r.Use(middleware.RealIP) + r.Use(mwStripTrailingSlash) + + // Use struct logger in production for requests, but use + // pretty console logger in development. + if a.conf.Mode == config.ModeDevelopment { + r.Use(middleware.Logger) + } else { + r.Use(a.mwStructLogger) + } + r.Use(middleware.Recoverer) + + // Set a timeout value on the request context (ctx), that will signal + // through ctx.Done() that the request has timed out and further + // processing should be stopped. 
+ r.Use(middleware.Timeout(60 * time.Second)) +} + +// mwAuthToken is a middleware that will check the database for a stateful token +// and attach it to the request context with the user, or return a 401 if it doesn't exist. +func (a *app) mwAuthToken(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + requestToken := r.Header.Get("Authorization") + + if requestToken == "" { + server.RespondUnauthorized(w) + return + } + + requestToken = strings.TrimPrefix(requestToken, "Bearer ") + + hash := hasher.HashToken(requestToken) + + // Check the database for the token + usr, err := a.repos.AuthTokens.GetUserFromToken(r.Context(), hash) + + if err != nil { + a.logger.Error(err, logger.Props{ + "token": requestToken, + "hash": fmt.Sprintf("%x", hash), + }) + server.RespondUnauthorized(w) + return + } + + r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken)) + + next.ServeHTTP(w, r) + }) +} + +// mwAdminOnly is a middleware that extends the mwAuthToken middleware to only allow +// requests from superusers. +func (a *app) mwAdminOnly(next http.Handler) http.Handler { + + mw := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + usr := services.UseUserCtx(r.Context()) + + if !usr.IsSuperuser { + server.RespondUnauthorized(w) + return + } + + next.ServeHTTP(w, r) + }) + + return a.mwAuthToken(mw) +} + +// mqStripTrailingSlash is a middleware that will strip trailing slashes from the request path. 
+func mwStripTrailingSlash(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + r.URL.Path = strings.TrimSuffix(r.URL.Path, "/") + next.ServeHTTP(w, r) + }) +} + +func (a *app) mwStructLogger(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + scheme := "http" + if r.TLS != nil { + scheme = "https" + } + + url := fmt.Sprintf("%s://%s%s %s", scheme, r.Host, r.RequestURI, r.Proto) + + a.logger.Info(fmt.Sprintf("[%s] %s", r.Method, url), logger.Props{ + "id": middleware.GetReqID(r.Context()), + "method": r.Method, + "url": url, + "remote": r.RemoteAddr, + }) + + next.ServeHTTP(w, r) + }) +} diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go new file mode 100644 index 0000000..e8887a5 --- /dev/null +++ b/backend/app/api/routes.go @@ -0,0 +1,82 @@ +package main + +import ( + "fmt" + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/hay-kot/git-web-template/backend/app/api/base" + _ "github.com/hay-kot/git-web-template/backend/app/api/docs" + v1 "github.com/hay-kot/git-web-template/backend/app/api/v1" + "github.com/hay-kot/git-web-template/backend/internal/repo" + httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware +) + +const prefix = "/api" + +// registerRoutes registers all the routes for the API +func (a *app) newRouter(repos *repo.AllRepos) *chi.Mux { + r := chi.NewRouter() + a.setGlobalMiddleware(r) + + // ========================================================================= + // Base Routes + + r.Get("/swagger/*", httpSwagger.Handler( + httpSwagger.URL(fmt.Sprintf("%s://%s/swagger/doc.json", a.conf.Swagger.Scheme, a.conf.Swagger.Host)), + )) + + // Server Favicon + r.Get("/favicon.ico", func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, "static/favicon.ico") + }) + + baseHandler := base.NewBaseController(a.logger, a.server) + r.Get(prefix+"/status", baseHandler.HandleBase(func() bool { 
return true }, "v1")) + + // ========================================================================= + // API Version 1 + v1Base := v1.BaseUrlFunc(prefix) + v1Handlers := v1.NewControllerV1(a.logger, a.services) + r.Post(v1Base("/users/login"), v1Handlers.HandleAuthLogin()) + r.Group(func(r chi.Router) { + r.Use(a.mwAuthToken) + r.Get(v1Base("/users/self"), v1Handlers.HandleUserSelf()) + r.Put(v1Base("/users/self"), v1Handlers.HandleUserUpdate()) + r.Put(v1Base("/users/self/password"), v1Handlers.HandleUserUpdatePassword()) + r.Post(v1Base("/users/logout"), v1Handlers.HandleAuthLogout()) + r.Get(v1Base("/users/refresh"), v1Handlers.HandleAuthRefresh()) + }) + + r.Group(func(r chi.Router) { + r.Use(a.mwAdminOnly) + r.Get(v1Base("/admin/users"), v1Handlers.HandleAdminUserGetAll()) + r.Post(v1Base("/admin/users"), v1Handlers.HandleAdminUserCreate()) + r.Get(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserGet()) + r.Put(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserUpdate()) + r.Delete(v1Base("/admin/users/{id}"), v1Handlers.HandleAdminUserDelete()) + }) + + return r +} + +// LogRoutes logs the routes of the server that are registered within Server.registerRoutes(). This is useful for debugging. +// See https://github.com/go-chi/chi/issues/332 for details and inspiration. 
+func (a *app) LogRoutes(r *chi.Mux) { + desiredSpaces := 10 + + walkFunc := func(method string, route string, handler http.Handler, middlewares ...func(http.Handler) http.Handler) error { + text := "[" + method + "]" + + for len(text) < desiredSpaces { + text = text + " " + } + + fmt.Printf("Registered Route: %s%s\n", text, route) + return nil + } + + if err := chi.Walk(r, walkFunc); err != nil { + fmt.Printf("Logging err: %s\n", err.Error()) + } +} diff --git a/backend/app/api/seed.go b/backend/app/api/seed.go new file mode 100644 index 0000000..3fbea74 --- /dev/null +++ b/backend/app/api/seed.go @@ -0,0 +1,98 @@ +package main + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" +) + +const ( + DefaultName = "Admin" + DefaultEmail = "admin@admin.com" + DefaultPassword = "admin" +) + +// EnsureAdministrator ensures that there is at least one superuser in the database +// if one isn't found a default is generate using the default credentials +func (a *app) EnsureAdministrator() { + superusers, err := a.repos.Users.GetSuperusers(context.Background()) + + if err != nil { + a.logger.Error(err, nil) + } + + if len(superusers) > 0 { + return + } + + pw, _ := hasher.HashPassword(DefaultPassword) + + newSuperUser := types.UserCreate{ + Name: DefaultName, + Email: DefaultEmail, + IsSuperuser: true, + Password: pw, + } + + a.logger.Info("creating default superuser", logger.Props{ + "name": newSuperUser.Name, + "email": newSuperUser.Email, + }) + + _, err = a.repos.Users.Create(context.Background(), newSuperUser) + + if err != nil { + a.logger.Fatal(err, nil) + } + +} + +func (a *app) SeedDatabase(repos *repo.AllRepos) { + if !a.conf.Seed.Enabled { + return + } + + for _, user := range a.conf.Seed.Users { + + // Check if User Exists + 
usr, _ := repos.Users.GetOneEmail(context.Background(), user.Email) + + if usr.ID != uuid.Nil { + a.logger.Info("seed user already exists", logger.Props{ + "user": user.Name, + }) + continue + } + + hashedPw, err := hasher.HashPassword(user.Password) + + if err != nil { + a.logger.Error(err, logger.Props{ + "details": "failed to hash password", + "user": user.Name, + }) + } + + _, err = repos.Users.Create(context.Background(), types.UserCreate{ + Name: user.Name, + Email: user.Email, + IsSuperuser: user.IsSuperuser, + Password: hashedPw, + }) + + if err != nil { + a.logger.Error(err, logger.Props{ + "details": "failed to create seed user", + "name": user.Name, + }) + } + + a.logger.Info("creating seed user", logger.Props{ + "name": user.Name, + }) + } +} diff --git a/backend/app/api/v1/controller.go b/backend/app/api/v1/controller.go new file mode 100644 index 0000000..2d13045 --- /dev/null +++ b/backend/app/api/v1/controller.go @@ -0,0 +1,29 @@ +package v1 + +import ( + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" +) + +type V1Controller struct { + log *logger.Logger + svc *services.AllServices +} + +func BaseUrlFunc(prefix string) func(s string) string { + v1Base := prefix + "/v1" + prefixFunc := func(s string) string { + return v1Base + s + } + + return prefixFunc +} + +func NewControllerV1(log *logger.Logger, svc *services.AllServices) *V1Controller { + ctrl := &V1Controller{ + log: log, + svc: svc, + } + + return ctrl +} diff --git a/backend/app/api/v1/controller_test.go b/backend/app/api/v1/controller_test.go new file mode 100644 index 0000000..685d8c6 --- /dev/null +++ b/backend/app/api/v1/controller_test.go @@ -0,0 +1,20 @@ +package v1 + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_NewHandlerV1(t *testing.T) { + + v1Base := BaseUrlFunc("/testing/v1") + ctrl := NewControllerV1(mockHandler.log, mockHandler.svc) + + assert.NotNil(t, ctrl) + + 
assert.Equal(t, ctrl.log, mockHandler.log) + + assert.Equal(t, "/testing/v1/v1/abc123", v1Base("/abc123")) + assert.Equal(t, "/testing/v1/v1/abc123", v1Base("/abc123")) +} diff --git a/backend/app/api/v1/main_test.go b/backend/app/api/v1/main_test.go new file mode 100644 index 0000000..c9a1276 --- /dev/null +++ b/backend/app/api/v1/main_test.go @@ -0,0 +1,51 @@ +package v1 + +import ( + "context" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/mocks" + "github.com/hay-kot/git-web-template/backend/internal/mocks/factories" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +var mockHandler = &V1Controller{} +var users = []types.UserOut{} + +func userPool() func() { + create := []types.UserCreate{ + factories.UserFactory(), + factories.UserFactory(), + factories.UserFactory(), + factories.UserFactory(), + } + + userOut := []types.UserOut{} + + for _, user := range create { + usrOut, _ := mockHandler.svc.Admin.Create(context.Background(), user) + userOut = append(userOut, usrOut) + } + + users = userOut + + purge := func() { + mockHandler.svc.Admin.DeleteAll(context.Background()) + } + + return purge +} + +func TestMain(m *testing.M) { + // Set Handler Vars + mockHandler.log = mocks.GetStructLogger() + repos, closeDb := mocks.GetEntRepos() + mockHandler.svc = mocks.GetMockServices(repos) + + defer closeDb() + + purge := userPool() + defer purge() + + m.Run() +} diff --git a/backend/app/api/v1/v1_ctrl_admin.go b/backend/app/api/v1/v1_ctrl_admin.go new file mode 100644 index 0000000..81afd43 --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_admin.go @@ -0,0 +1,207 @@ +package v1 + +import ( + "errors" + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + 
"github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +// HandleAdminUserGetAll godoc +// @Summary Gets all users from the database +// @Tags Admin: Users +// @Produce json +// @Success 200 {object} server.Result{item=[]types.UserOut} +// @Router /v1/admin/users [get] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserGetAll() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + users, err := ctrl.svc.Admin.GetAll(r.Context()) + + if err != nil { + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + server.Respond(w, http.StatusOK, server.Wrap(users)) + } +} + +// HandleAdminUserGet godoc +// @Summary Get a user from the database +// @Tags Admin: Users +// @Produce json +// @Param id path string true "User ID" +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/admin/users/{id} [get] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserGet() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "scope": "admin", + "details": "failed to convert id to valid UUID", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + user, err := ctrl.svc.Admin.GetByID(r.Context(), uid) + + if err != nil { + ctrl.log.Error(err, nil) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + server.Respond(w, http.StatusOK, server.Wrap(user)) + + } +} + +// HandleAdminUserCreate godoc +// @Summary Create a new user +// @Tags Admin: Users +// @Produce json +// @Param payload body types.UserCreate true "User Data" +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/admin/users [POST] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserCreate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + createData := types.UserCreate{} + + if err := 
server.Decode(r, &createData); err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to decode user create data", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + err := createData.Validate() + + if err != nil { + server.RespondError(w, http.StatusUnprocessableEntity, err) + return + } + + hashedPw, err := hasher.HashPassword(createData.Password) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to hash password", + }) + + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + createData.Password = hashedPw + userOut, err := ctrl.svc.Admin.Create(r.Context(), createData) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to create user", + }) + + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + server.Respond(w, http.StatusCreated, server.Wrap(userOut)) + } +} + +// HandleAdminUserUpdate godoc +// @Summary Update a User +// @Tags Admin: Users +// @Param id path string true "User ID" +// @Param payload body types.UserUpdate true "User Data" +// @Produce json +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/admin/users/{id} [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserUpdate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "scope": "admin", + "details": "failed to convert id to valid UUID", + }) + } + + updateData := types.UserUpdate{} + + if err := server.Decode(r, &updateData); err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to decode user update data", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + newData, err := ctrl.svc.Admin.UpdateProperties(r.Context(), uid, updateData) + + if err != nil { + 
ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to update user", + }) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + server.Respond(w, http.StatusOK, server.Wrap(newData)) + } +} + +// HandleAdminUserDelete godoc +// @Summary Delete a User +// @Tags Admin: Users +// @Param id path string true "User ID" +// @Produce json +// @Success 204 +// @Router /v1/admin/users/{id} [DELETE] +// @Security Bearer +func (ctrl *V1Controller) HandleAdminUserDelete() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + uid, err := uuid.Parse(chi.URLParam(r, "id")) + if err != nil { + ctrl.log.Debug(err.Error(), logger.Props{ + "scope": "admin", + "details": "failed to convert id to valid UUID", + }) + } + + actor := services.UseUserCtx(r.Context()) + + if actor.ID == uid { + server.RespondError(w, http.StatusBadRequest, errors.New("cannot delete yourself")) + return + } + + err = ctrl.svc.Admin.Delete(r.Context(), uid) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "admin", + "details": "failed to delete user", + }) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + } +} diff --git a/backend/app/api/v1/v1_ctrl_admin_test.go b/backend/app/api/v1/v1_ctrl_admin_test.go new file mode 100644 index 0000000..c0066c7 --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_admin_test.go @@ -0,0 +1,109 @@ +package v1 + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/mocks/chimocker" + "github.com/hay-kot/git-web-template/backend/internal/mocks/factories" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/server" + "github.com/stretchr/testify/assert" +) + +const ( + UrlUser = "/api/v1/admin/users" + UrlUserId = "/api/v1/admin/users/%v" + UrlUserIdChi = "/api/v1/admin/users/{id}" +) + +type 
usersResponse struct { + Users []types.UserOut `json:"item"` +} + +type userResponse struct { + User types.UserOut `json:"item"` +} + +func Test_HandleAdminUserGetAll_Success(t *testing.T) { + r := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, UrlUser, nil) + + mockHandler.HandleAdminUserGetAll()(r, req) + + response := usersResponse{ + Users: []types.UserOut{}, + } + + _ = json.Unmarshal(r.Body.Bytes(), &response) + assert.Equal(t, http.StatusOK, r.Code) + assert.Equal(t, len(users), len(response.Users)) + + knowEmail := []string{ + users[0].Email, + users[1].Email, + users[2].Email, + users[3].Email, + } + + for _, user := range users { + assert.Contains(t, knowEmail, user.Email) + } + +} + +func Test_HandleAdminUserGet_Success(t *testing.T) { + targetUser := users[2] + res := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, fmt.Sprintf(UrlUserId, targetUser.ID), nil) + + req = chimocker.WithUrlParam(req, "id", fmt.Sprintf("%v", targetUser.ID)) + + mockHandler.HandleAdminUserGet()(res, req) + assert.Equal(t, http.StatusOK, res.Code) + + response := userResponse{ + User: types.UserOut{}, + } + + _ = json.Unmarshal(res.Body.Bytes(), &response) + assert.Equal(t, targetUser.ID, response.User.ID) +} + +func Test_HandleAdminUserCreate_Success(t *testing.T) { + payload := factories.UserFactory() + + r := httptest.NewRecorder() + + body, err := json.Marshal(payload) + assert.NoError(t, err) + + req := httptest.NewRequest(http.MethodGet, UrlUser, bytes.NewBuffer(body)) + req.Header.Set(server.ContentType, server.ContentJSON) + + mockHandler.HandleAdminUserCreate()(r, req) + + assert.Equal(t, http.StatusCreated, r.Code) + + usr, err := mockHandler.svc.Admin.GetByEmail(context.Background(), payload.Email) + + assert.NoError(t, err) + assert.Equal(t, payload.Email, usr.Email) + assert.Equal(t, payload.Name, usr.Name) + assert.NotEqual(t, payload.Password, usr.Password) // smoke test - check password is hashed + + _ = 
mockHandler.svc.Admin.Delete(context.Background(), usr.ID) +} + +func Test_HandleAdminUserUpdate_Success(t *testing.T) { + t.Skip() +} + +func Test_HandleAdminUserUpdate_Delete(t *testing.T) { + t.Skip() +} diff --git a/backend/app/api/v1/v1_ctrl_auth.go b/backend/app/api/v1/v1_ctrl_auth.go new file mode 100644 index 0000000..f204e1e --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_auth.go @@ -0,0 +1,136 @@ +package v1 + +import ( + "errors" + "net/http" + + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +var ( + HeaderFormData = "application/x-www-form-urlencoded" + HeaderJSON = "application/json" +) + +// HandleAuthLogin godoc +// @Summary User Login +// @Tags Authentication +// @Accept x-www-form-urlencoded +// @Accept application/json +// @Param username formData string false "string" example(admin@admin.com) +// @Param password formData string false "string" example(admin) +// @Produce json +// @Success 200 {object} types.TokenResponse +// @Router /v1/users/login [POST] +func (ctrl *V1Controller) HandleAuthLogin() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + loginForm := &types.LoginForm{} + + if r.Header.Get("Content-Type") == HeaderFormData { + err := r.ParseForm() + + if err != nil { + server.Respond(w, http.StatusBadRequest, server.Wrap(err)) + return + } + + loginForm.Username = r.PostFormValue("username") + loginForm.Password = r.PostFormValue("password") + } else if r.Header.Get("Content-Type") == HeaderJSON { + err := server.Decode(r, loginForm) + + if err != nil { + server.Respond(w, http.StatusBadRequest, server.Wrap(err)) + return + } + } else { + server.Respond(w, http.StatusBadRequest, errors.New("invalid content type")) + return + } + + if loginForm.Username == "" || loginForm.Password == "" { + 
server.RespondError(w, http.StatusBadRequest, errors.New("username and password are required")) + return + } + + newToken, err := ctrl.svc.User.Login(r.Context(), loginForm.Username, loginForm.Password) + + if err != nil { + server.RespondError(w, http.StatusUnauthorized, err) + return + } + + err = server.Respond(w, http.StatusOK, types.TokenResponse{ + BearerToken: "Bearer " + newToken.Raw, + ExpiresAt: newToken.ExpiresAt, + }) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "user": loginForm.Username, + }) + return + } + } +} + +// HandleAuthLogout godoc +// @Summary User Logout +// @Tags Authentication +// @Success 204 +// @Router /v1/users/logout [POST] +// @Security Bearer +func (ctrl *V1Controller) HandleAuthLogout() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + token := services.UseTokenCtx(r.Context()) + + if token == "" { + server.RespondError(w, http.StatusUnauthorized, errors.New("no token within request context")) + return + } + + err := ctrl.svc.User.Logout(r.Context(), token) + + if err != nil { + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + err = server.Respond(w, http.StatusNoContent, nil) + } +} + +// HandleAuthLogout godoc +// @Summary User Token Refresh +// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token. +// @Description This does not validate that the user still exists within the database. 
+// @Tags Authentication +// @Success 200 +// @Router /v1/users/refresh [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleAuthRefresh() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + requestToken := services.UseTokenCtx(r.Context()) + + if requestToken == "" { + server.RespondError(w, http.StatusUnauthorized, errors.New("no user token found")) + return + } + + newToken, err := ctrl.svc.User.RenewToken(r.Context(), requestToken) + + if err != nil { + server.RespondUnauthorized(w) + return + } + + err = server.Respond(w, http.StatusOK, newToken) + + if err != nil { + return + } + } +} diff --git a/backend/app/api/v1/v1_ctrl_user.go b/backend/app/api/v1/v1_ctrl_user.go new file mode 100644 index 0000000..aed64b6 --- /dev/null +++ b/backend/app/api/v1/v1_ctrl_user.go @@ -0,0 +1,80 @@ +package v1 + +import ( + "errors" + "net/http" + + "github.com/hay-kot/git-web-template/backend/internal/services" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/logger" + "github.com/hay-kot/git-web-template/backend/pkgs/server" +) + +// HandleUserSelf godoc +// @Summary Get the current user +// @Tags User +// @Produce json +// @Success 200 {object} server.Result{item=types.UserOut} +// @Router /v1/users/self [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleUserSelf() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + token := services.UseTokenCtx(r.Context()) + usr, err := ctrl.svc.User.GetSelf(r.Context(), token) + if usr.IsNull() || err != nil { + ctrl.log.Error(errors.New("no user within request context"), nil) + server.RespondInternalServerError(w) + return + } + + _ = server.Respond(w, http.StatusOK, server.Wrap(usr)) + } +} + +// HandleUserUpdate godoc +// @Summary Update the current user +// @Tags User +// @Produce json +// @Param payload body types.UserUpdate true "User Data" +// @Success 200 {object} 
server.Result{item=types.UserUpdate} +// @Router /v1/users/self [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleUserUpdate() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + updateData := types.UserUpdate{} + if err := server.Decode(r, &updateData); err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "user", + "details": "failed to decode user update data", + }) + server.RespondError(w, http.StatusBadRequest, err) + return + } + + actor := services.UseUserCtx(r.Context()) + newData, err := ctrl.svc.User.UpdateSelf(r.Context(), actor.ID, updateData) + + if err != nil { + ctrl.log.Error(err, logger.Props{ + "scope": "user", + "details": "failed to update user", + }) + server.RespondError(w, http.StatusInternalServerError, err) + return + } + + _ = server.Respond(w, http.StatusOK, server.Wrap(newData)) + } +} + +// HandleUserUpdatePassword godoc +// @Summary Update the current user's password // TODO: +// @Tags User +// @Produce json +// @Success 204 +// @Router /v1/users/self/password [PUT] +// @Security Bearer +func (ctrl *V1Controller) HandleUserUpdatePassword() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + } +} diff --git a/backend/app/cli/app.go b/backend/app/cli/app.go new file mode 100644 index 0000000..dd31ed9 --- /dev/null +++ b/backend/app/cli/app.go @@ -0,0 +1,9 @@ +package main + +import ( + "github.com/hay-kot/git-web-template/backend/internal/repo" +) + +type app struct { + repos *repo.AllRepos +} diff --git a/backend/app/cli/app_users.go b/backend/app/cli/app_users.go new file mode 100644 index 0000000..c13ac29 --- /dev/null +++ b/backend/app/cli/app_users.go @@ -0,0 +1,105 @@ +package main + +import ( + "context" + "fmt" + "os" + "text/tabwriter" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/app/cli/reader" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + 
"github.com/urfave/cli/v2" +) + +func (a *app) UserCreate(c *cli.Context) error { + var defaultValidators = []reader.StringValidator{ + reader.StringRequired, + reader.StringNoLeadingOrTrailingWhitespace, + } + // Get Flags + name := reader.ReadString("Name: ", + defaultValidators..., + ) + password := reader.ReadString("Password: ", + defaultValidators..., + ) + + email := reader.ReadString("Email: ", + reader.StringRequired, + reader.StringNoLeadingOrTrailingWhitespace, + reader.StringContainsAt, + ) + isSuper := reader.ReadBool("Is Superuser?") + + pwHash, err := hasher.HashPassword(password) + if err != nil { + return err + } + + usr := types.UserCreate{ + Name: name, + Email: email, + Password: pwHash, + IsSuperuser: isSuper, + } + + _, err = a.repos.Users.Create(context.Background(), usr) + + if err == nil { + fmt.Println("Super user created") + } + return err +} + +func (a *app) UserDelete(c *cli.Context) error { + // Get Flags + id := c.String("id") + uid := uuid.MustParse(id) + + fmt.Printf("Deleting user with id: %s\n", id) + + // Confirm Action + fmt.Printf("Are you sure you want to delete this user? 
(y/n) ") + var answer string + _, err := fmt.Scanln(&answer) + if answer != "y" || err != nil { + fmt.Println("Aborting") + return nil + } + + err = a.repos.Users.Delete(context.Background(), uid) + + if err == nil { + fmt.Printf("%v User(s) deleted (id=%v)\n", 1, id) + } + return err +} + +func (a *app) UserList(c *cli.Context) error { + fmt.Println("Superuser List") + + users, err := a.repos.Users.GetAll(context.Background()) + + if err != nil { + return err + } + + tabWriter := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) + defer func(tabWriter *tabwriter.Writer) { + _ = tabWriter.Flush() + }(tabWriter) + + _, err = fmt.Fprintln(tabWriter, "Id\tName\tEmail\tIsSuper") + + if err != nil { + return err + } + + for _, u := range users { + _, _ = fmt.Fprintf(tabWriter, "%v\t%s\t%s\t%v\n", u.ID, u.Name, u.Email, u.IsSuperuser) + } + + return nil +} diff --git a/backend/app/cli/main.go b/backend/app/cli/main.go new file mode 100644 index 0000000..d778b1f --- /dev/null +++ b/backend/app/cli/main.go @@ -0,0 +1,82 @@ +package main + +import ( + "context" + "log" + "os" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/config" + "github.com/hay-kot/git-web-template/backend/internal/repo" + _ "github.com/mattn/go-sqlite3" + + "github.com/urfave/cli/v2" +) + +func main() { + cfg, err := config.NewConfig("config.yml") + + if err != nil { + panic(err) + } + + if err := run(cfg); err != nil { + log.Fatal(err) + } +} + +func run(cfg *config.Config) error { + // ========================================================================= + // Initialize Database + c, err := ent.Open(cfg.Database.GetDriver(), cfg.Database.GetUrl()) + if err != nil { + log.Fatalf("failed opening connection to sqlite: %v", err) + } + defer func(c *ent.Client) { + _ = c.Close() + }(c) + if err := c.Schema.Create(context.Background()); err != nil { + log.Fatalf("failed creating schema resources: %v", err) + } + + // Create App + a := &app{ + 
repos: repo.EntAllRepos(c), + } + + app := &cli.App{ + Commands: []*cli.Command{ + { + Name: "users", + Aliases: []string{"u"}, + Usage: "options to manage users", + Subcommands: []*cli.Command{ + { + Name: "list", + Usage: "list users in database", + Action: a.UserList, + }, + { + Name: "add", + Usage: "add a new user", + Action: a.UserCreate, + }, + { + Name: "delete", + Usage: "delete user in database", + Action: a.UserDelete, + Flags: []cli.Flag{ + &cli.IntFlag{ + Name: "id", + Usage: "name of the user to add", + Required: true, + }, + }, + }, + }, + }, + }, + } + + return app.Run(os.Args) +} diff --git a/backend/app/cli/reader/reader.go b/backend/app/cli/reader/reader.go new file mode 100644 index 0000000..a8cc92c --- /dev/null +++ b/backend/app/cli/reader/reader.go @@ -0,0 +1,65 @@ +package reader + +import "fmt" + +type StringValidator func(s string) bool + +func StringRequired(s string) bool { + return s != "" +} + +func StringNoLeadingOrTrailingWhitespace(s string) bool { + return s != "" && len(s) > 0 && s[0] != ' ' && s[len(s)-1] != ' ' +} + +func StringContainsAt(s string) bool { + for _, c := range s { + if c == '@' { + return true + } + } + return false +} + +func ReadString(message string, sv ...StringValidator) string { + for { + fmt.Print(message) + var input string + fmt.Scanln(&input) + + if len(sv) == 0 { + return input + } + + isValid := true + for _, validator := range sv { + if !validator(input) { + isValid = false + fmt.Println("Invalid input") + continue + } + + } + + if isValid { + return input + } + + } +} + +func ReadBool(message string) bool { + for { + fmt.Print(message + " (y/n) ") + var input string + fmt.Scanln(&input) + + if input == "y" { + return true + } else if input == "n" { + return false + } else { + fmt.Println("Invalid input") + } + } +} diff --git a/backend/app/generator/main.go b/backend/app/generator/main.go new file mode 100644 index 0000000..d1d6efb --- /dev/null +++ b/backend/app/generator/main.go @@ -0,0 +1,72 @@ 
+package main + +import ( + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/automapper" + "github.com/tkrajina/typescriptify-golang-structs/typescriptify" +) + +// generateMappers serialized the config file into a list of automapper struct +func generateMappers() []automapper.AutoMapper { + return []automapper.AutoMapper{ + { + Package: "mapper", + Prefix: "users", + Name: "User Out", + Schema: automapper.Schema{ + Type: types.UserOut{}, + Prefix: "types", + }, + Model: automapper.Model{ + Type: ent.User{}, + Prefix: "ent", + }, + Imports: []string{}, + }, + } +} + +func generateTypeScript() { + // Configuration + converter := typescriptify.New() + converter.CreateInterface = true + converter.ManageType(uuid.UUID{}, typescriptify.TypeOptions{TSType: "string"}) + converter.ManageType(time.Time{}, typescriptify.TypeOptions{TSType: "Date", TSTransform: "new Date(__VALUE__)"}) + + // General + public := []any{ + // Base Types + types.ApiSummary{}, + + // User Types + types.UserOut{}, + types.UserCreate{}, + types.UserIn{}, + types.UserUpdate{}, + + // Auth Types + types.LoginForm{}, + types.TokenResponse{}, + } + + for i := 0; i < len(public); i++ { + converter.Add(public[i]) + } + + // Creation + converter.ConvertToFile("./generated-types.ts") + +} + +func main() { + automappers := generateMappers() + conf := automapper.DefaultConf() + + automapper.Generate(automappers, conf) + + generateTypeScript() +} diff --git a/backend/config.template.yml b/backend/config.template.yml new file mode 100644 index 0000000..0dc2626 --- /dev/null +++ b/backend/config.template.yml @@ -0,0 +1,31 @@ +--- +mode: development +swagger: + host: localhost:7745 + scheme: http +web: + port: 3915 + host: 127.0.0.1 +database: + driver: sqlite3 + sqlite-url: ./ent.db?_fk=1 +logger: + level: debug + file: api.log +mailer: + host: smtp.example.com 
+ port: 465 + username: + password: + from: example@email.com +seed: + enabled: true + users: + - name: Admin + email: admin@admin.com + password: admin + isSuperuser: true + - name: User + email: user@user.com + password: user + isSuperuser: false diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml new file mode 100644 index 0000000..298153c --- /dev/null +++ b/backend/docker-compose.yml @@ -0,0 +1,10 @@ +version: "3.4" + +services: + gowebtemplate: + image: gowebtemplate + build: + context: . + dockerfile: ./Dockerfile + ports: + - 3001:7745 diff --git a/backend/ent/authtokens.go b/backend/ent/authtokens.go new file mode 100644 index 0000000..ecf611c --- /dev/null +++ b/backend/ent/authtokens.go @@ -0,0 +1,165 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokens is the model entity for the AuthTokens schema. +type AuthTokens struct { + config `json:"-"` + // ID of the ent. + ID int `json:"id,omitempty"` + // Token holds the value of the "token" field. + Token []byte `json:"token,omitempty"` + // ExpiresAt holds the value of the "expires_at" field. + ExpiresAt time.Time `json:"expires_at,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the AuthTokensQuery when eager-loading is set. + Edges AuthTokensEdges `json:"edges"` + user_auth_tokens *uuid.UUID +} + +// AuthTokensEdges holds the relations/edges for other nodes in the graph. +type AuthTokensEdges struct { + // User holds the value of the user edge. 
+ User *User `json:"user,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [1]bool +} + +// UserOrErr returns the User value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e AuthTokensEdges) UserOrErr() (*User, error) { + if e.loadedTypes[0] { + if e.User == nil { + // The edge user was loaded in eager-loading, + // but was not found. + return nil, &NotFoundError{label: user.Label} + } + return e.User, nil + } + return nil, &NotLoadedError{edge: "user"} +} + +// scanValues returns the types for scanning values from sql.Rows. +func (*AuthTokens) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case authtokens.FieldToken: + values[i] = new([]byte) + case authtokens.FieldID: + values[i] = new(sql.NullInt64) + case authtokens.FieldExpiresAt, authtokens.FieldCreatedAt: + values[i] = new(sql.NullTime) + case authtokens.ForeignKeys[0]: // user_auth_tokens + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + default: + return nil, fmt.Errorf("unexpected column %q for type AuthTokens", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the AuthTokens fields. 
+func (at *AuthTokens) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case authtokens.FieldID: + value, ok := values[i].(*sql.NullInt64) + if !ok { + return fmt.Errorf("unexpected type %T for field id", value) + } + at.ID = int(value.Int64) + case authtokens.FieldToken: + if value, ok := values[i].(*[]byte); !ok { + return fmt.Errorf("unexpected type %T for field token", values[i]) + } else if value != nil { + at.Token = *value + } + case authtokens.FieldExpiresAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field expires_at", values[i]) + } else if value.Valid { + at.ExpiresAt = value.Time + } + case authtokens.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + at.CreatedAt = value.Time + } + case authtokens.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field user_auth_tokens", values[i]) + } else if value.Valid { + at.user_auth_tokens = new(uuid.UUID) + *at.user_auth_tokens = *value.S.(*uuid.UUID) + } + } + } + return nil +} + +// QueryUser queries the "user" edge of the AuthTokens entity. +func (at *AuthTokens) QueryUser() *UserQuery { + return (&AuthTokensClient{config: at.config}).QueryUser(at) +} + +// Update returns a builder for updating this AuthTokens. +// Note that you need to call AuthTokens.Unwrap() before calling this method if this AuthTokens +// was returned from a transaction, and the transaction was committed or rolled back. 
+func (at *AuthTokens) Update() *AuthTokensUpdateOne { + return (&AuthTokensClient{config: at.config}).UpdateOne(at) +} + +// Unwrap unwraps the AuthTokens entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (at *AuthTokens) Unwrap() *AuthTokens { + tx, ok := at.config.driver.(*txDriver) + if !ok { + panic("ent: AuthTokens is not a transactional entity") + } + at.config.driver = tx.drv + return at +} + +// String implements the fmt.Stringer. +func (at *AuthTokens) String() string { + var builder strings.Builder + builder.WriteString("AuthTokens(") + builder.WriteString(fmt.Sprintf("id=%v", at.ID)) + builder.WriteString(", token=") + builder.WriteString(fmt.Sprintf("%v", at.Token)) + builder.WriteString(", expires_at=") + builder.WriteString(at.ExpiresAt.Format(time.ANSIC)) + builder.WriteString(", created_at=") + builder.WriteString(at.CreatedAt.Format(time.ANSIC)) + builder.WriteByte(')') + return builder.String() +} + +// AuthTokensSlice is a parsable slice of AuthTokens. +type AuthTokensSlice []*AuthTokens + +func (at AuthTokensSlice) config(cfg config) { + for _i := range at { + at[_i].config = cfg + } +} diff --git a/backend/ent/authtokens/authtokens.go b/backend/ent/authtokens/authtokens.go new file mode 100644 index 0000000..5c10d3a --- /dev/null +++ b/backend/ent/authtokens/authtokens.go @@ -0,0 +1,67 @@ +// Code generated by entc, DO NOT EDIT. + +package authtokens + +import ( + "time" +) + +const ( + // Label holds the string label denoting the authtokens type in the database. + Label = "auth_tokens" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldToken holds the string denoting the token field in the database. + FieldToken = "token" + // FieldExpiresAt holds the string denoting the expires_at field in the database. 
+ FieldExpiresAt = "expires_at" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // EdgeUser holds the string denoting the user edge name in mutations. + EdgeUser = "user" + // Table holds the table name of the authtokens in the database. + Table = "auth_tokens" + // UserTable is the table that holds the user relation/edge. + UserTable = "auth_tokens" + // UserInverseTable is the table name for the User entity. + // It exists in this package in order to avoid circular dependency with the "user" package. + UserInverseTable = "users" + // UserColumn is the table column denoting the user relation/edge. + UserColumn = "user_auth_tokens" +) + +// Columns holds all SQL columns for authtokens fields. +var Columns = []string{ + FieldID, + FieldToken, + FieldExpiresAt, + FieldCreatedAt, +} + +// ForeignKeys holds the SQL foreign-keys that are owned by the "auth_tokens" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "user_auth_tokens", +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } + return false +} + +var ( + // DefaultExpiresAt holds the default value on creation for the "expires_at" field. + DefaultExpiresAt func() time.Time + // DefaultCreatedAt holds the default value on creation for the "created_at" field. + DefaultCreatedAt func() time.Time +) diff --git a/backend/ent/authtokens/where.go b/backend/ent/authtokens/where.go new file mode 100644 index 0000000..5fda3f0 --- /dev/null +++ b/backend/ent/authtokens/where.go @@ -0,0 +1,403 @@ +// Code generated by entc, DO NOT EDIT. 
+ +package authtokens + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/hay-kot/git-web-template/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. 
+func IDGTE(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id int) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// Token applies equality check predicate on the "token" field. It's identical to TokenEQ. +func Token(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldToken), v)) + }) +} + +// ExpiresAt applies equality check predicate on the "expires_at" field. It's identical to ExpiresAtEQ. +func ExpiresAt(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldExpiresAt), v)) + }) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// TokenEQ applies the EQ predicate on the "token" field. +func TokenEQ(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldToken), v)) + }) +} + +// TokenNEQ applies the NEQ predicate on the "token" field. +func TokenNEQ(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldToken), v)) + }) +} + +// TokenIn applies the In predicate on the "token" field. 
+func TokenIn(vs ...[]byte) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldToken), v...)) + }) +} + +// TokenNotIn applies the NotIn predicate on the "token" field. +func TokenNotIn(vs ...[]byte) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldToken), v...)) + }) +} + +// TokenGT applies the GT predicate on the "token" field. +func TokenGT(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldToken), v)) + }) +} + +// TokenGTE applies the GTE predicate on the "token" field. +func TokenGTE(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldToken), v)) + }) +} + +// TokenLT applies the LT predicate on the "token" field. +func TokenLT(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldToken), v)) + }) +} + +// TokenLTE applies the LTE predicate on the "token" field. +func TokenLTE(v []byte) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldToken), v)) + }) +} + +// ExpiresAtEQ applies the EQ predicate on the "expires_at" field. 
+func ExpiresAtEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtNEQ applies the NEQ predicate on the "expires_at" field. +func ExpiresAtNEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtIn applies the In predicate on the "expires_at" field. +func ExpiresAtIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldExpiresAt), v...)) + }) +} + +// ExpiresAtNotIn applies the NotIn predicate on the "expires_at" field. +func ExpiresAtNotIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldExpiresAt), v...)) + }) +} + +// ExpiresAtGT applies the GT predicate on the "expires_at" field. +func ExpiresAtGT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtGTE applies the GTE predicate on the "expires_at" field. +func ExpiresAtGTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtLT applies the LT predicate on the "expires_at" field. 
+func ExpiresAtLT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldExpiresAt), v)) + }) +} + +// ExpiresAtLTE applies the LTE predicate on the "expires_at" field. +func ExpiresAtLTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldExpiresAt), v)) + }) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.AuthTokens { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.AuthTokens(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldCreatedAt), v...)) + }) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. 
+func CreatedAtGT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldCreatedAt), v)) + }) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldCreatedAt), v)) + }) +} + +// HasUser applies the HasEdge predicate on the "user" edge. +func HasUser() predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UserTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasUserWith applies the HasEdge predicate on the "user" edge with a given conditions (other predicates). +func HasUserWith(preds ...predicate.User) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UserInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. 
+func And(predicates ...predicate.AuthTokens) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.AuthTokens) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.AuthTokens) predicate.AuthTokens { + return predicate.AuthTokens(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/authtokens_create.go b/backend/ent/authtokens_create.go new file mode 100644 index 0000000..4cca125 --- /dev/null +++ b/backend/ent/authtokens_create.go @@ -0,0 +1,326 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokensCreate is the builder for creating a AuthTokens entity. +type AuthTokensCreate struct { + config + mutation *AuthTokensMutation + hooks []Hook +} + +// SetToken sets the "token" field. +func (atc *AuthTokensCreate) SetToken(b []byte) *AuthTokensCreate { + atc.mutation.SetToken(b) + return atc +} + +// SetExpiresAt sets the "expires_at" field. +func (atc *AuthTokensCreate) SetExpiresAt(t time.Time) *AuthTokensCreate { + atc.mutation.SetExpiresAt(t) + return atc +} + +// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil. 
+func (atc *AuthTokensCreate) SetNillableExpiresAt(t *time.Time) *AuthTokensCreate { + if t != nil { + atc.SetExpiresAt(*t) + } + return atc +} + +// SetCreatedAt sets the "created_at" field. +func (atc *AuthTokensCreate) SetCreatedAt(t time.Time) *AuthTokensCreate { + atc.mutation.SetCreatedAt(t) + return atc +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableCreatedAt(t *time.Time) *AuthTokensCreate { + if t != nil { + atc.SetCreatedAt(*t) + } + return atc +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (atc *AuthTokensCreate) SetUserID(id uuid.UUID) *AuthTokensCreate { + atc.mutation.SetUserID(id) + return atc +} + +// SetNillableUserID sets the "user" edge to the User entity by ID if the given value is not nil. +func (atc *AuthTokensCreate) SetNillableUserID(id *uuid.UUID) *AuthTokensCreate { + if id != nil { + atc = atc.SetUserID(*id) + } + return atc +} + +// SetUser sets the "user" edge to the User entity. +func (atc *AuthTokensCreate) SetUser(u *User) *AuthTokensCreate { + return atc.SetUserID(u.ID) +} + +// Mutation returns the AuthTokensMutation object of the builder. +func (atc *AuthTokensCreate) Mutation() *AuthTokensMutation { + return atc.mutation +} + +// Save creates the AuthTokens in the database. 
+func (atc *AuthTokensCreate) Save(ctx context.Context) (*AuthTokens, error) { + var ( + err error + node *AuthTokens + ) + atc.defaults() + if len(atc.hooks) == 0 { + if err = atc.check(); err != nil { + return nil, err + } + node, err = atc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = atc.check(); err != nil { + return nil, err + } + atc.mutation = mutation + if node, err = atc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(atc.hooks) - 1; i >= 0; i-- { + if atc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atc.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atc.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. +func (atc *AuthTokensCreate) SaveX(ctx context.Context) *AuthTokens { + v, err := atc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (atc *AuthTokensCreate) Exec(ctx context.Context) error { + _, err := atc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atc *AuthTokensCreate) ExecX(ctx context.Context) { + if err := atc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (atc *AuthTokensCreate) defaults() { + if _, ok := atc.mutation.ExpiresAt(); !ok { + v := authtokens.DefaultExpiresAt() + atc.mutation.SetExpiresAt(v) + } + if _, ok := atc.mutation.CreatedAt(); !ok { + v := authtokens.DefaultCreatedAt() + atc.mutation.SetCreatedAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (atc *AuthTokensCreate) check() error { + if _, ok := atc.mutation.Token(); !ok { + return &ValidationError{Name: "token", err: errors.New(`ent: missing required field "AuthTokens.token"`)} + } + if _, ok := atc.mutation.ExpiresAt(); !ok { + return &ValidationError{Name: "expires_at", err: errors.New(`ent: missing required field "AuthTokens.expires_at"`)} + } + if _, ok := atc.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "AuthTokens.created_at"`)} + } + return nil +} + +func (atc *AuthTokensCreate) sqlSave(ctx context.Context) (*AuthTokens, error) { + _node, _spec := atc.createSpec() + if err := sqlgraph.CreateNode(ctx, atc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + id := _spec.ID.Value.(int64) + _node.ID = int(id) + return _node, nil +} + +func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) { + var ( + _node = &AuthTokens{config: atc.config} + _spec = &sqlgraph.CreateSpec{ + Table: authtokens.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + } + ) + if value, ok := atc.mutation.Token(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeBytes, + Value: value, + Column: authtokens.FieldToken, + }) + _node.Token = value + } + if value, ok := atc.mutation.ExpiresAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldExpiresAt, + }) + _node.ExpiresAt = value + } + if value, ok := atc.mutation.CreatedAt(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + _node.CreatedAt = value + } + if nodes := atc.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: 
authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.user_auth_tokens = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// AuthTokensCreateBulk is the builder for creating many AuthTokens entities in bulk. +type AuthTokensCreateBulk struct { + config + builders []*AuthTokensCreate +} + +// Save creates the AuthTokens entities in the database. +func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, error) { + specs := make([]*sqlgraph.CreateSpec, len(atcb.builders)) + nodes := make([]*AuthTokens, len(atcb.builders)) + mutators := make([]Mutator, len(atcb.builders)) + for i := range atcb.builders { + func(i int, root context.Context) { + builder := atcb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, atcb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. 
+ if err = sqlgraph.BatchCreate(ctx, atcb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + if specs[i].ID.Value != nil { + id := specs[i].ID.Value.(int64) + nodes[i].ID = int(id) + } + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, atcb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (atcb *AuthTokensCreateBulk) SaveX(ctx context.Context) []*AuthTokens { + v, err := atcb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (atcb *AuthTokensCreateBulk) Exec(ctx context.Context) error { + _, err := atcb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atcb *AuthTokensCreateBulk) ExecX(ctx context.Context) { + if err := atcb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/authtokens_delete.go b/backend/ent/authtokens_delete.go new file mode 100644 index 0000000..123ee17 --- /dev/null +++ b/backend/ent/authtokens_delete.go @@ -0,0 +1,111 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" +) + +// AuthTokensDelete is the builder for deleting a AuthTokens entity. +type AuthTokensDelete struct { + config + hooks []Hook + mutation *AuthTokensMutation +} + +// Where appends a list predicates to the AuthTokensDelete builder. 
+func (atd *AuthTokensDelete) Where(ps ...predicate.AuthTokens) *AuthTokensDelete { + atd.mutation.Where(ps...) + return atd +} + +// Exec executes the deletion query and returns how many vertices were deleted. +func (atd *AuthTokensDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(atd.hooks) == 0 { + affected, err = atd.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + atd.mutation = mutation + affected, err = atd.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(atd.hooks) - 1; i >= 0; i-- { + if atd.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atd.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atd.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atd *AuthTokensDelete) ExecX(ctx context.Context) int { + n, err := atd.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + if ps := atd.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return sqlgraph.DeleteNodes(ctx, atd.driver, _spec) +} + +// AuthTokensDeleteOne is the builder for deleting a single AuthTokens entity. +type AuthTokensDeleteOne struct { + atd *AuthTokensDelete +} + +// Exec executes the deletion query. 
+func (atdo *AuthTokensDeleteOne) Exec(ctx context.Context) error { + n, err := atdo.atd.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{authtokens.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (atdo *AuthTokensDeleteOne) ExecX(ctx context.Context) { + atdo.atd.ExecX(ctx) +} diff --git a/backend/ent/authtokens_query.go b/backend/ent/authtokens_query.go new file mode 100644 index 0000000..9e309c8 --- /dev/null +++ b/backend/ent/authtokens_query.go @@ -0,0 +1,1000 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokensQuery is the builder for querying AuthTokens entities. +type AuthTokensQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.AuthTokens + // eager-loading edges. + withUser *UserQuery + withFKs bool + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the AuthTokensQuery builder. +func (atq *AuthTokensQuery) Where(ps ...predicate.AuthTokens) *AuthTokensQuery { + atq.predicates = append(atq.predicates, ps...) + return atq +} + +// Limit adds a limit step to the query. +func (atq *AuthTokensQuery) Limit(limit int) *AuthTokensQuery { + atq.limit = &limit + return atq +} + +// Offset adds an offset step to the query. +func (atq *AuthTokensQuery) Offset(offset int) *AuthTokensQuery { + atq.offset = &offset + return atq +} + +// Unique configures the query builder to filter duplicate records on query. 
+// By default, unique is set to true, and can be disabled using this method. +func (atq *AuthTokensQuery) Unique(unique bool) *AuthTokensQuery { + atq.unique = &unique + return atq +} + +// Order adds an order step to the query. +func (atq *AuthTokensQuery) Order(o ...OrderFunc) *AuthTokensQuery { + atq.order = append(atq.order, o...) + return atq +} + +// QueryUser chains the current query on the "user" edge. +func (atq *AuthTokensQuery) QueryUser() *UserQuery { + query := &UserQuery{config: atq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := atq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := atq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(authtokens.Table, authtokens.FieldID, selector), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, authtokens.UserTable, authtokens.UserColumn), + ) + fromU = sqlgraph.SetNeighbors(atq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first AuthTokens entity from the query. +// Returns a *NotFoundError when no AuthTokens was found. +func (atq *AuthTokensQuery) First(ctx context.Context) (*AuthTokens, error) { + nodes, err := atq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{authtokens.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (atq *AuthTokensQuery) FirstX(ctx context.Context) *AuthTokens { + node, err := atq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first AuthTokens ID from the query. +// Returns a *NotFoundError when no AuthTokens ID was found. 
+func (atq *AuthTokensQuery) FirstID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = atq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{authtokens.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (atq *AuthTokensQuery) FirstIDX(ctx context.Context) int { + id, err := atq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single AuthTokens entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when exactly one AuthTokens entity is not found. +// Returns a *NotFoundError when no AuthTokens entities are found. +func (atq *AuthTokensQuery) Only(ctx context.Context) (*AuthTokens, error) { + nodes, err := atq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{authtokens.Label} + default: + return nil, &NotSingularError{authtokens.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (atq *AuthTokensQuery) OnlyX(ctx context.Context) *AuthTokens { + node, err := atq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only AuthTokens ID in the query. +// Returns a *NotSingularError when exactly one AuthTokens ID is not found. +// Returns a *NotFoundError when no entities are found. +func (atq *AuthTokensQuery) OnlyID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = atq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = &NotSingularError{authtokens.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. 
+func (atq *AuthTokensQuery) OnlyIDX(ctx context.Context) int { + id, err := atq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of AuthTokensSlice. +func (atq *AuthTokensQuery) All(ctx context.Context) ([]*AuthTokens, error) { + if err := atq.prepareQuery(ctx); err != nil { + return nil, err + } + return atq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. +func (atq *AuthTokensQuery) AllX(ctx context.Context) []*AuthTokens { + nodes, err := atq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of AuthTokens IDs. +func (atq *AuthTokensQuery) IDs(ctx context.Context) ([]int, error) { + var ids []int + if err := atq.Select(authtokens.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (atq *AuthTokensQuery) IDsX(ctx context.Context) []int { + ids, err := atq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (atq *AuthTokensQuery) Count(ctx context.Context) (int, error) { + if err := atq.prepareQuery(ctx); err != nil { + return 0, err + } + return atq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (atq *AuthTokensQuery) CountX(ctx context.Context) int { + count, err := atq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (atq *AuthTokensQuery) Exist(ctx context.Context) (bool, error) { + if err := atq.prepareQuery(ctx); err != nil { + return false, err + } + return atq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. 
+func (atq *AuthTokensQuery) ExistX(ctx context.Context) bool { + exist, err := atq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the AuthTokensQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (atq *AuthTokensQuery) Clone() *AuthTokensQuery { + if atq == nil { + return nil + } + return &AuthTokensQuery{ + config: atq.config, + limit: atq.limit, + offset: atq.offset, + order: append([]OrderFunc{}, atq.order...), + predicates: append([]predicate.AuthTokens{}, atq.predicates...), + withUser: atq.withUser.Clone(), + // clone intermediate query. + sql: atq.sql.Clone(), + path: atq.path, + } +} + +// WithUser tells the query-builder to eager-load the nodes that are connected to +// the "user" edge. The optional arguments are used to configure the query builder of the edge. +func (atq *AuthTokensQuery) WithUser(opts ...func(*UserQuery)) *AuthTokensQuery { + query := &UserQuery{config: atq.config} + for _, opt := range opts { + opt(query) + } + atq.withUser = query + return atq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// Token []byte `json:"token,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.AuthTokens.Query(). +// GroupBy(authtokens.FieldToken). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +// +func (atq *AuthTokensQuery) GroupBy(field string, fields ...string) *AuthTokensGroupBy { + group := &AuthTokensGroupBy{config: atq.config} + group.fields = append([]string{field}, fields...) 
+ group.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := atq.prepareQuery(ctx); err != nil { + return nil, err + } + return atq.sqlQuery(ctx), nil + } + return group +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// Token []byte `json:"token,omitempty"` +// } +// +// client.AuthTokens.Query(). +// Select(authtokens.FieldToken). +// Scan(ctx, &v) +// +func (atq *AuthTokensQuery) Select(fields ...string) *AuthTokensSelect { + atq.fields = append(atq.fields, fields...) + return &AuthTokensSelect{AuthTokensQuery: atq} +} + +func (atq *AuthTokensQuery) prepareQuery(ctx context.Context) error { + for _, f := range atq.fields { + if !authtokens.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if atq.path != nil { + prev, err := atq.path(ctx) + if err != nil { + return err + } + atq.sql = prev + } + return nil +} + +func (atq *AuthTokensQuery) sqlAll(ctx context.Context) ([]*AuthTokens, error) { + var ( + nodes = []*AuthTokens{} + withFKs = atq.withFKs + _spec = atq.querySpec() + loadedTypes = [1]bool{ + atq.withUser != nil, + } + ) + if atq.withUser != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, authtokens.ForeignKeys...) 
+ } + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + node := &AuthTokens{config: atq.config} + nodes = append(nodes, node) + return node.scanValues(columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + if len(nodes) == 0 { + return fmt.Errorf("ent: Assign called without calling ScanValues") + } + node := nodes[len(nodes)-1] + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + if err := sqlgraph.QueryNodes(ctx, atq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + + if query := atq.withUser; query != nil { + ids := make([]uuid.UUID, 0, len(nodes)) + nodeids := make(map[uuid.UUID][]*AuthTokens) + for i := range nodes { + if nodes[i].user_auth_tokens == nil { + continue + } + fk := *nodes[i].user_auth_tokens + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + query.Where(user.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return nil, err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return nil, fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v`, n.ID) + } + for i := range nodes { + nodes[i].Edges.User = n + } + } + } + + return nodes, nil +} + +func (atq *AuthTokensQuery) sqlCount(ctx context.Context) (int, error) { + _spec := atq.querySpec() + _spec.Node.Columns = atq.fields + if len(atq.fields) > 0 { + _spec.Unique = atq.unique != nil && *atq.unique + } + return sqlgraph.CountNodes(ctx, atq.driver, _spec) +} + +func (atq *AuthTokensQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := atq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (atq *AuthTokensQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + Columns: authtokens.Columns, + ID: 
&sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + From: atq.sql, + Unique: true, + } + if unique := atq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := atq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, authtokens.FieldID) + for i := range fields { + if fields[i] != authtokens.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := atq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := atq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := atq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := atq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (atq *AuthTokensQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(atq.driver.Dialect()) + t1 := builder.Table(authtokens.Table) + columns := atq.fields + if len(columns) == 0 { + columns = authtokens.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if atq.sql != nil { + selector = atq.sql + selector.Select(selector.Columns(columns...)...) + } + if atq.unique != nil && *atq.unique { + selector.Distinct() + } + for _, p := range atq.predicates { + p(selector) + } + for _, p := range atq.order { + p(selector) + } + if offset := atq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := atq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// AuthTokensGroupBy is the group-by builder for AuthTokens entities. +type AuthTokensGroupBy struct { + config + fields []string + fns []AggregateFunc + // intermediate query (i.e. 
traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (atgb *AuthTokensGroupBy) Aggregate(fns ...AggregateFunc) *AuthTokensGroupBy { + atgb.fns = append(atgb.fns, fns...) + return atgb +} + +// Scan applies the group-by query and scans the result into the given value. +func (atgb *AuthTokensGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := atgb.path(ctx) + if err != nil { + return err + } + atgb.sql = query + return atgb.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) ScanX(ctx context.Context, v interface{}) { + if err := atgb.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from group-by. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Strings(ctx context.Context) ([]string, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Strings is not achievable when grouping more than 1 field") + } + var v []string + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) StringsX(ctx context.Context) []string { + v, err := atgb.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a group-by query. +// It is only allowed when executing a group-by query with one field. 
+func (atgb *AuthTokensGroupBy) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = atgb.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) StringX(ctx context.Context) string { + v, err := atgb.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from group-by. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Ints(ctx context.Context) ([]int, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Ints is not achievable when grouping more than 1 field") + } + var v []int + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) IntsX(ctx context.Context) []int { + v, err := atgb.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = atgb.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) IntX(ctx context.Context) int { + v, err := atgb.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from group-by. 
+// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Float64s(ctx context.Context) ([]float64, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Float64s is not achievable when grouping more than 1 field") + } + var v []float64 + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) Float64sX(ctx context.Context) []float64 { + v, err := atgb.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = atgb.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) Float64X(ctx context.Context) float64 { + v, err := atgb.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from group-by. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Bools(ctx context.Context) ([]bool, error) { + if len(atgb.fields) > 1 { + return nil, errors.New("ent: AuthTokensGroupBy.Bools is not achievable when grouping more than 1 field") + } + var v []bool + if err := atgb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. 
+func (atgb *AuthTokensGroupBy) BoolsX(ctx context.Context) []bool { + v, err := atgb.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (atgb *AuthTokensGroupBy) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = atgb.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensGroupBy.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (atgb *AuthTokensGroupBy) BoolX(ctx context.Context) bool { + v, err := atgb.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (atgb *AuthTokensGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range atgb.fields { + if !authtokens.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := atgb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := atgb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (atgb *AuthTokensGroupBy) sqlQuery() *sql.Selector { + selector := atgb.sql.Select() + aggregation := make([]string, 0, len(atgb.fns)) + for _, fn := range atgb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. 
+ if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(atgb.fields)+len(atgb.fns)) + for _, f := range atgb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + return selector.GroupBy(selector.Columns(atgb.fields...)...) +} + +// AuthTokensSelect is the builder for selecting fields of AuthTokens entities. +type AuthTokensSelect struct { + *AuthTokensQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. +func (ats *AuthTokensSelect) Scan(ctx context.Context, v interface{}) error { + if err := ats.prepareQuery(ctx); err != nil { + return err + } + ats.sql = ats.AuthTokensQuery.sqlQuery(ctx) + return ats.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (ats *AuthTokensSelect) ScanX(ctx context.Context, v interface{}) { + if err := ats.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Strings(ctx context.Context) ([]string, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Strings is not achievable when selecting more than 1 field") + } + var v []string + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (ats *AuthTokensSelect) StringsX(ctx context.Context) []string { + v, err := ats.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a selector. It is only allowed when selecting one field. 
+func (ats *AuthTokensSelect) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = ats.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (ats *AuthTokensSelect) StringX(ctx context.Context) string { + v, err := ats.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Ints(ctx context.Context) ([]int, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Ints is not achievable when selecting more than 1 field") + } + var v []int + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (ats *AuthTokensSelect) IntsX(ctx context.Context) []int { + v, err := ats.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = ats.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (ats *AuthTokensSelect) IntX(ctx context.Context) int { + v, err := ats.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. 
+func (ats *AuthTokensSelect) Float64s(ctx context.Context) ([]float64, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Float64s is not achievable when selecting more than 1 field") + } + var v []float64 + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (ats *AuthTokensSelect) Float64sX(ctx context.Context) []float64 { + v, err := ats.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = ats.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (ats *AuthTokensSelect) Float64X(ctx context.Context) float64 { + v, err := ats.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from a selector. It is only allowed when selecting one field. +func (ats *AuthTokensSelect) Bools(ctx context.Context) ([]bool, error) { + if len(ats.fields) > 1 { + return nil, errors.New("ent: AuthTokensSelect.Bools is not achievable when selecting more than 1 field") + } + var v []bool + if err := ats.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. +func (ats *AuthTokensSelect) BoolsX(ctx context.Context) []bool { + v, err := ats.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a selector. It is only allowed when selecting one field. 
+func (ats *AuthTokensSelect) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = ats.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{authtokens.Label} + default: + err = fmt.Errorf("ent: AuthTokensSelect.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (ats *AuthTokensSelect) BoolX(ctx context.Context) bool { + v, err := ats.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (ats *AuthTokensSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := ats.sql.Query() + if err := ats.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/authtokens_update.go b/backend/ent/authtokens_update.go new file mode 100644 index 0000000..243db3f --- /dev/null +++ b/backend/ent/authtokens_update.go @@ -0,0 +1,472 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// AuthTokensUpdate is the builder for updating AuthTokens entities. +type AuthTokensUpdate struct { + config + hooks []Hook + mutation *AuthTokensMutation +} + +// Where appends a list predicates to the AuthTokensUpdate builder. +func (atu *AuthTokensUpdate) Where(ps ...predicate.AuthTokens) *AuthTokensUpdate { + atu.mutation.Where(ps...) + return atu +} + +// SetToken sets the "token" field. 
+func (atu *AuthTokensUpdate) SetToken(b []byte) *AuthTokensUpdate { + atu.mutation.SetToken(b) + return atu +} + +// SetExpiresAt sets the "expires_at" field. +func (atu *AuthTokensUpdate) SetExpiresAt(t time.Time) *AuthTokensUpdate { + atu.mutation.SetExpiresAt(t) + return atu +} + +// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil. +func (atu *AuthTokensUpdate) SetNillableExpiresAt(t *time.Time) *AuthTokensUpdate { + if t != nil { + atu.SetExpiresAt(*t) + } + return atu +} + +// SetCreatedAt sets the "created_at" field. +func (atu *AuthTokensUpdate) SetCreatedAt(t time.Time) *AuthTokensUpdate { + atu.mutation.SetCreatedAt(t) + return atu +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atu *AuthTokensUpdate) SetNillableCreatedAt(t *time.Time) *AuthTokensUpdate { + if t != nil { + atu.SetCreatedAt(*t) + } + return atu +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (atu *AuthTokensUpdate) SetUserID(id uuid.UUID) *AuthTokensUpdate { + atu.mutation.SetUserID(id) + return atu +} + +// SetNillableUserID sets the "user" edge to the User entity by ID if the given value is not nil. +func (atu *AuthTokensUpdate) SetNillableUserID(id *uuid.UUID) *AuthTokensUpdate { + if id != nil { + atu = atu.SetUserID(*id) + } + return atu +} + +// SetUser sets the "user" edge to the User entity. +func (atu *AuthTokensUpdate) SetUser(u *User) *AuthTokensUpdate { + return atu.SetUserID(u.ID) +} + +// Mutation returns the AuthTokensMutation object of the builder. +func (atu *AuthTokensUpdate) Mutation() *AuthTokensMutation { + return atu.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (atu *AuthTokensUpdate) ClearUser() *AuthTokensUpdate { + atu.mutation.ClearUser() + return atu +} + +// Save executes the query and returns the number of nodes affected by the update operation. 
+func (atu *AuthTokensUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(atu.hooks) == 0 { + affected, err = atu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + atu.mutation = mutation + affected, err = atu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(atu.hooks) - 1; i >= 0; i-- { + if atu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (atu *AuthTokensUpdate) SaveX(ctx context.Context) int { + affected, err := atu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (atu *AuthTokensUpdate) Exec(ctx context.Context) error { + _, err := atu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (atu *AuthTokensUpdate) ExecX(ctx context.Context) { + if err := atu.Exec(ctx); err != nil { + panic(err) + } +} + +func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + Columns: authtokens.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + if ps := atu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := atu.mutation.Token(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBytes, + Value: value, + Column: authtokens.FieldToken, + }) + } + if value, ok := atu.mutation.ExpiresAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldExpiresAt, + }) + } + if value, ok := atu.mutation.CreatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + } + if atu.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := atu.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = 
append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, atu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{authtokens.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return 0, err + } + return n, nil +} + +// AuthTokensUpdateOne is the builder for updating a single AuthTokens entity. +type AuthTokensUpdateOne struct { + config + fields []string + hooks []Hook + mutation *AuthTokensMutation +} + +// SetToken sets the "token" field. +func (atuo *AuthTokensUpdateOne) SetToken(b []byte) *AuthTokensUpdateOne { + atuo.mutation.SetToken(b) + return atuo +} + +// SetExpiresAt sets the "expires_at" field. +func (atuo *AuthTokensUpdateOne) SetExpiresAt(t time.Time) *AuthTokensUpdateOne { + atuo.mutation.SetExpiresAt(t) + return atuo +} + +// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil. +func (atuo *AuthTokensUpdateOne) SetNillableExpiresAt(t *time.Time) *AuthTokensUpdateOne { + if t != nil { + atuo.SetExpiresAt(*t) + } + return atuo +} + +// SetCreatedAt sets the "created_at" field. +func (atuo *AuthTokensUpdateOne) SetCreatedAt(t time.Time) *AuthTokensUpdateOne { + atuo.mutation.SetCreatedAt(t) + return atuo +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (atuo *AuthTokensUpdateOne) SetNillableCreatedAt(t *time.Time) *AuthTokensUpdateOne { + if t != nil { + atuo.SetCreatedAt(*t) + } + return atuo +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (atuo *AuthTokensUpdateOne) SetUserID(id uuid.UUID) *AuthTokensUpdateOne { + atuo.mutation.SetUserID(id) + return atuo +} + +// SetNillableUserID sets the "user" edge to the User entity by ID if the given value is not nil. 
+func (atuo *AuthTokensUpdateOne) SetNillableUserID(id *uuid.UUID) *AuthTokensUpdateOne { + if id != nil { + atuo = atuo.SetUserID(*id) + } + return atuo +} + +// SetUser sets the "user" edge to the User entity. +func (atuo *AuthTokensUpdateOne) SetUser(u *User) *AuthTokensUpdateOne { + return atuo.SetUserID(u.ID) +} + +// Mutation returns the AuthTokensMutation object of the builder. +func (atuo *AuthTokensUpdateOne) Mutation() *AuthTokensMutation { + return atuo.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (atuo *AuthTokensUpdateOne) ClearUser() *AuthTokensUpdateOne { + atuo.mutation.ClearUser() + return atuo +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (atuo *AuthTokensUpdateOne) Select(field string, fields ...string) *AuthTokensUpdateOne { + atuo.fields = append([]string{field}, fields...) + return atuo +} + +// Save executes the query and returns the updated AuthTokens entity. +func (atuo *AuthTokensUpdateOne) Save(ctx context.Context) (*AuthTokens, error) { + var ( + err error + node *AuthTokens + ) + if len(atuo.hooks) == 0 { + node, err = atuo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + atuo.mutation = mutation + node, err = atuo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(atuo.hooks) - 1; i >= 0; i-- { + if atuo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = atuo.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, atuo.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. 
+func (atuo *AuthTokensUpdateOne) SaveX(ctx context.Context) *AuthTokens { + node, err := atuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (atuo *AuthTokensUpdateOne) Exec(ctx context.Context) error { + _, err := atuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (atuo *AuthTokensUpdateOne) ExecX(ctx context.Context) { + if err := atuo.Exec(ctx); err != nil { + panic(err) + } +} + +func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: authtokens.Table, + Columns: authtokens.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + id, ok := atuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "AuthTokens.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := atuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, authtokens.FieldID) + for _, f := range fields { + if !authtokens.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != authtokens.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := atuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := atuo.mutation.Token(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBytes, + Value: value, + Column: authtokens.FieldToken, + }) + } + if value, ok := atuo.mutation.ExpiresAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldExpiresAt, + }) + } + if value, ok := 
atuo.mutation.CreatedAt(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeTime, + Value: value, + Column: authtokens.FieldCreatedAt, + }) + } + if atuo.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := atuo.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: authtokens.UserTable, + Columns: []string{authtokens.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &AuthTokens{config: atuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, atuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{authtokens.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/ent/client.go b/backend/ent/client.go new file mode 100644 index 0000000..c6cf533 --- /dev/null +++ b/backend/ent/client.go @@ -0,0 +1,344 @@ +// Code generated by entc, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "fmt" + "log" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/migrate" + + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" + + "entgo.io/ent/dialect" + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" +) + +// Client is the client that holds all ent builders. +type Client struct { + config + // Schema is the client for creating, migrating and dropping schema. + Schema *migrate.Schema + // AuthTokens is the client for interacting with the AuthTokens builders. + AuthTokens *AuthTokensClient + // User is the client for interacting with the User builders. + User *UserClient +} + +// NewClient creates a new client configured with the given options. +func NewClient(opts ...Option) *Client { + cfg := config{log: log.Println, hooks: &hooks{}} + cfg.options(opts...) + client := &Client{config: cfg} + client.init() + return client +} + +func (c *Client) init() { + c.Schema = migrate.NewSchema(c.driver) + c.AuthTokens = NewAuthTokensClient(c.config) + c.User = NewUserClient(c.config) +} + +// Open opens a database/sql.DB specified by the driver name and +// the data source name, and returns a new client attached to it. +// Optional parameters can be added for configuring the client. +func Open(driverName, dataSourceName string, options ...Option) (*Client, error) { + switch driverName { + case dialect.MySQL, dialect.Postgres, dialect.SQLite: + drv, err := sql.Open(driverName, dataSourceName) + if err != nil { + return nil, err + } + return NewClient(append(options, Driver(drv))...), nil + default: + return nil, fmt.Errorf("unsupported driver: %q", driverName) + } +} + +// Tx returns a new transactional client. The provided context +// is used until the transaction is committed or rolled back. 
+func (c *Client) Tx(ctx context.Context) (*Tx, error) { + if _, ok := c.driver.(*txDriver); ok { + return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") + } + tx, err := newTx(ctx, c.driver) + if err != nil { + return nil, fmt.Errorf("ent: starting a transaction: %w", err) + } + cfg := c.config + cfg.driver = tx + return &Tx{ + ctx: ctx, + config: cfg, + AuthTokens: NewAuthTokensClient(cfg), + User: NewUserClient(cfg), + }, nil +} + +// BeginTx returns a transactional client with specified options. +func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) { + if _, ok := c.driver.(*txDriver); ok { + return nil, fmt.Errorf("ent: cannot start a transaction within a transaction") + } + tx, err := c.driver.(interface { + BeginTx(context.Context, *sql.TxOptions) (dialect.Tx, error) + }).BeginTx(ctx, opts) + if err != nil { + return nil, fmt.Errorf("ent: starting a transaction: %w", err) + } + cfg := c.config + cfg.driver = &txDriver{tx: tx, drv: c.driver} + return &Tx{ + ctx: ctx, + config: cfg, + AuthTokens: NewAuthTokensClient(cfg), + User: NewUserClient(cfg), + }, nil +} + +// Debug returns a new debug-client. It's used to get verbose logging on specific operations. +// +// client.Debug(). +// AuthTokens. +// Query(). +// Count(ctx) +// +func (c *Client) Debug() *Client { + if c.debug { + return c + } + cfg := c.config + cfg.driver = dialect.Debug(c.driver, c.log) + client := &Client{config: cfg} + client.init() + return client +} + +// Close closes the database connection and prevents new queries from starting. +func (c *Client) Close() error { + return c.driver.Close() +} + +// Use adds the mutation hooks to all the entity clients. +// In order to add hooks to a specific client, call: `client.Node.Use(...)`. +func (c *Client) Use(hooks ...Hook) { + c.AuthTokens.Use(hooks...) + c.User.Use(hooks...) +} + +// AuthTokensClient is a client for the AuthTokens schema. 
type AuthTokensClient struct {
	config // embedded shared config: driver, hooks, debug logging
}

// NewAuthTokensClient returns a client for the AuthTokens from the given config.
func NewAuthTokensClient(c config) *AuthTokensClient {
	return &AuthTokensClient{config: c}
}

// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `authtokens.Hooks(f(g(h())))`.
func (c *AuthTokensClient) Use(hooks ...Hook) {
	c.hooks.AuthTokens = append(c.hooks.AuthTokens, hooks...)
}

// Create returns a create builder for AuthTokens.
func (c *AuthTokensClient) Create() *AuthTokensCreate {
	mutation := newAuthTokensMutation(c.config, OpCreate)
	return &AuthTokensCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// CreateBulk returns a builder for creating a bulk of AuthTokens entities.
func (c *AuthTokensClient) CreateBulk(builders ...*AuthTokensCreate) *AuthTokensCreateBulk {
	return &AuthTokensCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for AuthTokens.
func (c *AuthTokensClient) Update() *AuthTokensUpdate {
	mutation := newAuthTokensMutation(c.config, OpUpdate)
	return &AuthTokensUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOne returns an update builder for the given entity.
// The entity is captured in the mutation so "old value" queries can be
// answered without a database round-trip.
func (c *AuthTokensClient) UpdateOne(at *AuthTokens) *AuthTokensUpdateOne {
	mutation := newAuthTokensMutation(c.config, OpUpdateOne, withAuthTokens(at))
	return &AuthTokensUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOneID returns an update builder for the given id.
func (c *AuthTokensClient) UpdateOneID(id int) *AuthTokensUpdateOne {
	mutation := newAuthTokensMutation(c.config, OpUpdateOne, withAuthTokensID(id))
	return &AuthTokensUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// Delete returns a delete builder for AuthTokens.
func (c *AuthTokensClient) Delete() *AuthTokensDelete {
	mutation := newAuthTokensMutation(c.config, OpDelete)
	return &AuthTokensDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// DeleteOne returns a delete builder for the given entity.
func (c *AuthTokensClient) DeleteOne(at *AuthTokens) *AuthTokensDeleteOne {
	return c.DeleteOneID(at.ID)
}

// DeleteOneID returns a delete builder for the given id.
func (c *AuthTokensClient) DeleteOneID(id int) *AuthTokensDeleteOne {
	builder := c.Delete().Where(authtokens.ID(id))
	// Narrow the generic delete builder down to single-row semantics.
	builder.mutation.id = &id
	builder.mutation.op = OpDeleteOne
	return &AuthTokensDeleteOne{builder}
}

// Query returns a query builder for AuthTokens.
func (c *AuthTokensClient) Query() *AuthTokensQuery {
	return &AuthTokensQuery{
		config: c.config,
	}
}

// Get returns a AuthTokens entity by its id.
func (c *AuthTokensClient) Get(ctx context.Context, id int) (*AuthTokens, error) {
	return c.Query().Where(authtokens.ID(id)).Only(ctx)
}

// GetX is like Get, but panics if an error occurs.
func (c *AuthTokensClient) GetX(ctx context.Context, id int) *AuthTokens {
	obj, err := c.Get(ctx, id)
	if err != nil {
		panic(err)
	}
	return obj
}

// QueryUser queries the user edge of a AuthTokens.
func (c *AuthTokensClient) QueryUser(at *AuthTokens) *UserQuery {
	query := &UserQuery{config: c.config}
	// The traversal path is resolved lazily at execution time, starting
	// from this token's concrete row id and walking the M2O edge to users.
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := at.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(authtokens.Table, authtokens.FieldID, id),
			sqlgraph.To(user.Table, user.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, authtokens.UserTable, authtokens.UserColumn),
		)
		fromV = sqlgraph.Neighbors(at.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *AuthTokensClient) Hooks() []Hook {
	return c.hooks.AuthTokens
}

// UserClient is a client for the User schema.
+type UserClient struct { + config +} + +// NewUserClient returns a client for the User from the given config. +func NewUserClient(c config) *UserClient { + return &UserClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `user.Hooks(f(g(h())))`. +func (c *UserClient) Use(hooks ...Hook) { + c.hooks.User = append(c.hooks.User, hooks...) +} + +// Create returns a create builder for User. +func (c *UserClient) Create() *UserCreate { + mutation := newUserMutation(c.config, OpCreate) + return &UserCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of User entities. +func (c *UserClient) CreateBulk(builders ...*UserCreate) *UserCreateBulk { + return &UserCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for User. +func (c *UserClient) Update() *UserUpdate { + mutation := newUserMutation(c.config, OpUpdate) + return &UserUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *UserClient) UpdateOne(u *User) *UserUpdateOne { + mutation := newUserMutation(c.config, OpUpdateOne, withUser(u)) + return &UserUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *UserClient) UpdateOneID(id uuid.UUID) *UserUpdateOne { + mutation := newUserMutation(c.config, OpUpdateOne, withUserID(id)) + return &UserUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for User. +func (c *UserClient) Delete() *UserDelete { + mutation := newUserMutation(c.config, OpDelete) + return &UserDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a delete builder for the given entity. 
+func (c *UserClient) DeleteOne(u *User) *UserDeleteOne { + return c.DeleteOneID(u.ID) +} + +// DeleteOneID returns a delete builder for the given id. +func (c *UserClient) DeleteOneID(id uuid.UUID) *UserDeleteOne { + builder := c.Delete().Where(user.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &UserDeleteOne{builder} +} + +// Query returns a query builder for User. +func (c *UserClient) Query() *UserQuery { + return &UserQuery{ + config: c.config, + } +} + +// Get returns a User entity by its id. +func (c *UserClient) Get(ctx context.Context, id uuid.UUID) (*User, error) { + return c.Query().Where(user.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *UserClient) GetX(ctx context.Context, id uuid.UUID) *User { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryAuthTokens queries the auth_tokens edge of a User. +func (c *UserClient) QueryAuthTokens(u *User) *AuthTokensQuery { + query := &AuthTokensQuery{config: c.config} + query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) { + id := u.ID + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, id), + sqlgraph.To(authtokens.Table, authtokens.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.AuthTokensTable, user.AuthTokensColumn), + ) + fromV = sqlgraph.Neighbors(u.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *UserClient) Hooks() []Hook { + return c.hooks.User +} diff --git a/backend/ent/config.go b/backend/ent/config.go new file mode 100644 index 0000000..550e16e --- /dev/null +++ b/backend/ent/config.go @@ -0,0 +1,60 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "entgo.io/ent" + "entgo.io/ent/dialect" +) + +// Option function to configure the client. +type Option func(*config) + +// Config is the configuration for the client and its builder. 
+type config struct { + // driver used for executing database requests. + driver dialect.Driver + // debug enable a debug logging. + debug bool + // log used for logging on debug mode. + log func(...interface{}) + // hooks to execute on mutations. + hooks *hooks +} + +// hooks per client, for fast access. +type hooks struct { + AuthTokens []ent.Hook + User []ent.Hook +} + +// Options applies the options on the config object. +func (c *config) options(opts ...Option) { + for _, opt := range opts { + opt(c) + } + if c.debug { + c.driver = dialect.Debug(c.driver, c.log) + } +} + +// Debug enables debug logging on the ent.Driver. +func Debug() Option { + return func(c *config) { + c.debug = true + } +} + +// Log sets the logging function for debug mode. +func Log(fn func(...interface{})) Option { + return func(c *config) { + c.log = fn + } +} + +// Driver configures the client driver. +func Driver(driver dialect.Driver) Option { + return func(c *config) { + c.driver = driver + } +} diff --git a/backend/ent/context.go b/backend/ent/context.go new file mode 100644 index 0000000..0840726 --- /dev/null +++ b/backend/ent/context.go @@ -0,0 +1,33 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" +) + +type clientCtxKey struct{} + +// FromContext returns a Client stored inside a context, or nil if there isn't one. +func FromContext(ctx context.Context) *Client { + c, _ := ctx.Value(clientCtxKey{}).(*Client) + return c +} + +// NewContext returns a new context with the given Client attached. +func NewContext(parent context.Context, c *Client) context.Context { + return context.WithValue(parent, clientCtxKey{}, c) +} + +type txCtxKey struct{} + +// TxFromContext returns a Tx stored inside a context, or nil if there isn't one. +func TxFromContext(ctx context.Context) *Tx { + tx, _ := ctx.Value(txCtxKey{}).(*Tx) + return tx +} + +// NewTxContext returns a new context with the given Tx attached. 
+func NewTxContext(parent context.Context, tx *Tx) context.Context { + return context.WithValue(parent, txCtxKey{}, tx) +} diff --git a/backend/ent/ent.go b/backend/ent/ent.go new file mode 100644 index 0000000..2568a4e --- /dev/null +++ b/backend/ent/ent.go @@ -0,0 +1,261 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "errors" + "fmt" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// ent aliases to avoid import conflicts in user's code. +type ( + Op = ent.Op + Hook = ent.Hook + Value = ent.Value + Query = ent.Query + Policy = ent.Policy + Mutator = ent.Mutator + Mutation = ent.Mutation + MutateFunc = ent.MutateFunc +) + +// OrderFunc applies an ordering on the sql selector. +type OrderFunc func(*sql.Selector) + +// columnChecker returns a function indicates if the column exists in the given column. +func columnChecker(table string) func(string) error { + checks := map[string]func(string) bool{ + authtokens.Table: authtokens.ValidColumn, + user.Table: user.ValidColumn, + } + check, ok := checks[table] + if !ok { + return func(string) error { + return fmt.Errorf("unknown table %q", table) + } + } + return func(column string) error { + if !check(column) { + return fmt.Errorf("unknown column %q for table %q", column, table) + } + return nil + } +} + +// Asc applies the given fields in ASC order. +func Asc(fields ...string) OrderFunc { + return func(s *sql.Selector) { + check := columnChecker(s.TableName()) + for _, f := range fields { + if err := check(f); err != nil { + s.AddError(&ValidationError{Name: f, err: fmt.Errorf("ent: %w", err)}) + } + s.OrderBy(sql.Asc(s.C(f))) + } + } +} + +// Desc applies the given fields in DESC order. 
func Desc(fields ...string) OrderFunc {
	return func(s *sql.Selector) {
		check := columnChecker(s.TableName())
		for _, f := range fields {
			// Unknown columns are recorded on the selector rather than
			// panicking, so the error surfaces when the query executes.
			if err := check(f); err != nil {
				s.AddError(&ValidationError{Name: f, err: fmt.Errorf("ent: %w", err)})
			}
			s.OrderBy(sql.Desc(s.C(f)))
		}
	}
}

// AggregateFunc applies an aggregation step on the group-by traversal/selector.
type AggregateFunc func(*sql.Selector) string

// As is a pseudo aggregation function for renaming another other functions with custom names. For example:
//
//	GroupBy(field1, field2).
//	Aggregate(ent.As(ent.Sum(field1), "sum_field1"), (ent.As(ent.Sum(field2), "sum_field2")).
//	Scan(ctx, &v)
func As(fn AggregateFunc, end string) AggregateFunc {
	return func(s *sql.Selector) string {
		return sql.As(fn(s), end)
	}
}

// Count applies the "count" aggregation function on each group.
func Count() AggregateFunc {
	return func(s *sql.Selector) string {
		return sql.Count("*")
	}
}

// Max applies the "max" aggregation function on the given field of each group.
func Max(field string) AggregateFunc {
	return func(s *sql.Selector) string {
		check := columnChecker(s.TableName())
		if err := check(field); err != nil {
			s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)})
			// Empty string tells the selector to skip this aggregation.
			return ""
		}
		return sql.Max(s.C(field))
	}
}

// Mean applies the "mean" aggregation function on the given field of each group.
func Mean(field string) AggregateFunc {
	return func(s *sql.Selector) string {
		check := columnChecker(s.TableName())
		if err := check(field); err != nil {
			s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)})
			return ""
		}
		return sql.Avg(s.C(field))
	}
}

// Min applies the "min" aggregation function on the given field of each group.
func Min(field string) AggregateFunc {
	return func(s *sql.Selector) string {
		check := columnChecker(s.TableName())
		if err := check(field); err != nil {
			s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)})
			// Empty string tells the selector to skip this aggregation.
			return ""
		}
		return sql.Min(s.C(field))
	}
}

// Sum applies the "sum" aggregation function on the given field of each group.
func Sum(field string) AggregateFunc {
	return func(s *sql.Selector) string {
		check := columnChecker(s.TableName())
		if err := check(field); err != nil {
			s.AddError(&ValidationError{Name: field, err: fmt.Errorf("ent: %w", err)})
			return ""
		}
		return sql.Sum(s.C(field))
	}
}

// ValidationError returns when validating a field or edge fails.
type ValidationError struct {
	Name string // Field or edge name.
	err  error  // underlying cause, surfaced via Unwrap
}

// Error implements the error interface.
func (e *ValidationError) Error() string {
	return e.err.Error()
}

// Unwrap implements the errors.Wrapper interface.
func (e *ValidationError) Unwrap() error {
	return e.err
}

// IsValidationError returns a boolean indicating whether the error is a validation error.
func IsValidationError(err error) bool {
	if err == nil {
		return false
	}
	var e *ValidationError
	return errors.As(err, &e)
}

// NotFoundError returns when trying to fetch a specific entity and it was not found in the database.
type NotFoundError struct {
	label string // entity label, e.g. "user"
}

// Error implements the error interface.
func (e *NotFoundError) Error() string {
	return "ent: " + e.label + " not found"
}

// IsNotFound returns a boolean indicating whether the error is a not found error.
func IsNotFound(err error) bool {
	if err == nil {
		return false
	}
	var e *NotFoundError
	return errors.As(err, &e)
}

// MaskNotFound masks not found error.
func MaskNotFound(err error) error {
	// Treat "not found" as a non-error (nil); pass everything else through.
	if IsNotFound(err) {
		return nil
	}
	return err
}

// NotSingularError returns when trying to fetch a singular entity and more then one was found in the database.
type NotSingularError struct {
	label string // entity label, e.g. "user"
}

// Error implements the error interface.
func (e *NotSingularError) Error() string {
	return "ent: " + e.label + " not singular"
}

// IsNotSingular returns a boolean indicating whether the error is a not singular error.
func IsNotSingular(err error) bool {
	if err == nil {
		return false
	}
	var e *NotSingularError
	return errors.As(err, &e)
}

// NotLoadedError returns when trying to get a node that was not loaded by the query.
type NotLoadedError struct {
	edge string // edge name that was not eager-loaded
}

// Error implements the error interface.
func (e *NotLoadedError) Error() string {
	return "ent: " + e.edge + " edge was not loaded"
}

// IsNotLoaded returns a boolean indicating whether the error is a not loaded error.
func IsNotLoaded(err error) bool {
	if err == nil {
		return false
	}
	var e *NotLoadedError
	return errors.As(err, &e)
}

// ConstraintError returns when trying to create/update one or more entities and
// one or more of their constraints failed. For example, violation of edge or
// field uniqueness.
type ConstraintError struct {
	msg  string
	wrap error // wrapped driver error, surfaced via Unwrap
}

// Error implements the error interface.
// NOTE(review): value receiver here but pointer receiver on Unwrap below —
// this is how entc generates it; both ConstraintError and *ConstraintError
// satisfy the error interface.
func (e ConstraintError) Error() string {
	return "ent: constraint failed: " + e.msg
}

// Unwrap implements the errors.Wrapper interface.
func (e *ConstraintError) Unwrap() error {
	return e.wrap
}

// IsConstraintError returns a boolean indicating whether the error is a constraint failure.
+func IsConstraintError(err error) bool { + if err == nil { + return false + } + var e *ConstraintError + return errors.As(err, &e) +} diff --git a/backend/ent/enttest/enttest.go b/backend/ent/enttest/enttest.go new file mode 100644 index 0000000..cc6930e --- /dev/null +++ b/backend/ent/enttest/enttest.go @@ -0,0 +1,78 @@ +// Code generated by entc, DO NOT EDIT. + +package enttest + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/ent" + // required by schema hooks. + _ "github.com/hay-kot/git-web-template/backend/ent/runtime" + + "entgo.io/ent/dialect/sql/schema" +) + +type ( + // TestingT is the interface that is shared between + // testing.T and testing.B and used by enttest. + TestingT interface { + FailNow() + Error(...interface{}) + } + + // Option configures client creation. + Option func(*options) + + options struct { + opts []ent.Option + migrateOpts []schema.MigrateOption + } +) + +// WithOptions forwards options to client creation. +func WithOptions(opts ...ent.Option) Option { + return func(o *options) { + o.opts = append(o.opts, opts...) + } +} + +// WithMigrateOptions forwards options to auto migration. +func WithMigrateOptions(opts ...schema.MigrateOption) Option { + return func(o *options) { + o.migrateOpts = append(o.migrateOpts, opts...) + } +} + +func newOptions(opts []Option) *options { + o := &options{} + for _, opt := range opts { + opt(o) + } + return o +} + +// Open calls ent.Open and auto-run migration. +func Open(t TestingT, driverName, dataSourceName string, opts ...Option) *ent.Client { + o := newOptions(opts) + c, err := ent.Open(driverName, dataSourceName, o.opts...) + if err != nil { + t.Error(err) + t.FailNow() + } + if err := c.Schema.Create(context.Background(), o.migrateOpts...); err != nil { + t.Error(err) + t.FailNow() + } + return c +} + +// NewClient calls ent.NewClient and auto-run migration. +func NewClient(t TestingT, opts ...Option) *ent.Client { + o := newOptions(opts) + c := ent.NewClient(o.opts...) 
+ if err := c.Schema.Create(context.Background(), o.migrateOpts...); err != nil { + t.Error(err) + t.FailNow() + } + return c +} diff --git a/backend/ent/generate.go b/backend/ent/generate.go new file mode 100644 index 0000000..8d3fdfd --- /dev/null +++ b/backend/ent/generate.go @@ -0,0 +1,3 @@ +package ent + +//go:generate go run -mod=mod entgo.io/ent/cmd/ent generate ./schema diff --git a/backend/ent/hook/hook.go b/backend/ent/hook/hook.go new file mode 100644 index 0000000..1eefec3 --- /dev/null +++ b/backend/ent/hook/hook.go @@ -0,0 +1,217 @@ +// Code generated by entc, DO NOT EDIT. + +package hook + +import ( + "context" + "fmt" + + "github.com/hay-kot/git-web-template/backend/ent" +) + +// The AuthTokensFunc type is an adapter to allow the use of ordinary +// function as AuthTokens mutator. +type AuthTokensFunc func(context.Context, *ent.AuthTokensMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f AuthTokensFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.AuthTokensMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.AuthTokensMutation", m) + } + return f(ctx, mv) +} + +// The UserFunc type is an adapter to allow the use of ordinary +// function as User mutator. +type UserFunc func(context.Context, *ent.UserMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f UserFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + mv, ok := m.(*ent.UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.UserMutation", m) + } + return f(ctx, mv) +} + +// Condition is a hook condition function. +type Condition func(context.Context, ent.Mutation) bool + +// And groups conditions with the AND operator. 
func And(first, second Condition, rest ...Condition) Condition {
	return func(ctx context.Context, m ent.Mutation) bool {
		// Short-circuit: stop evaluating as soon as one condition fails.
		if !first(ctx, m) || !second(ctx, m) {
			return false
		}
		for _, cond := range rest {
			if !cond(ctx, m) {
				return false
			}
		}
		return true
	}
}

// Or groups conditions with the OR operator.
func Or(first, second Condition, rest ...Condition) Condition {
	return func(ctx context.Context, m ent.Mutation) bool {
		// Short-circuit: succeed as soon as one condition holds.
		if first(ctx, m) || second(ctx, m) {
			return true
		}
		for _, cond := range rest {
			if cond(ctx, m) {
				return true
			}
		}
		return false
	}
}

// Not negates a given condition.
func Not(cond Condition) Condition {
	return func(ctx context.Context, m ent.Mutation) bool {
		return !cond(ctx, m)
	}
}

// HasOp is a condition testing mutation operation.
func HasOp(op ent.Op) Condition {
	return func(_ context.Context, m ent.Mutation) bool {
		return m.Op().Is(op)
	}
}

// HasAddedFields is a condition validating `.AddedField` on fields.
// All the given fields must have been added for the condition to hold.
func HasAddedFields(field string, fields ...string) Condition {
	return func(_ context.Context, m ent.Mutation) bool {
		if _, exists := m.AddedField(field); !exists {
			return false
		}
		for _, field := range fields {
			if _, exists := m.AddedField(field); !exists {
				return false
			}
		}
		return true
	}
}

// HasClearedFields is a condition validating `.FieldCleared` on fields.
// All the given fields must have been cleared for the condition to hold.
func HasClearedFields(field string, fields ...string) Condition {
	return func(_ context.Context, m ent.Mutation) bool {
		if exists := m.FieldCleared(field); !exists {
			return false
		}
		for _, field := range fields {
			if exists := m.FieldCleared(field); !exists {
				return false
			}
		}
		return true
	}
}

// HasFields is a condition validating `.Field` on fields.
func HasFields(field string, fields ...string) Condition {
	return func(_ context.Context, m ent.Mutation) bool {
		if _, exists := m.Field(field); !exists {
			return false
		}
		for _, field := range fields {
			if _, exists := m.Field(field); !exists {
				return false
			}
		}
		return true
	}
}

// If executes the given hook under condition.
//
//	hook.If(ComputeAverage, And(HasFields(...), HasAddedFields(...)))
func If(hk ent.Hook, cond Condition) ent.Hook {
	return func(next ent.Mutator) ent.Mutator {
		return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
			if cond(ctx, m) {
				return hk(next).Mutate(ctx, m)
			}
			// Condition not met: skip this hook and continue the chain.
			return next.Mutate(ctx, m)
		})
	}
}

// On executes the given hook only for the given operation.
//
//	hook.On(Log, ent.Delete|ent.Create)
func On(hk ent.Hook, op ent.Op) ent.Hook {
	return If(hk, HasOp(op))
}

// Unless skips the given hook only for the given operation.
//
//	hook.Unless(Log, ent.Update|ent.UpdateOne)
func Unless(hk ent.Hook, op ent.Op) ent.Hook {
	return If(hk, Not(HasOp(op)))
}

// FixedError is a hook returning a fixed error.
func FixedError(err error) ent.Hook {
	return func(ent.Mutator) ent.Mutator {
		return ent.MutateFunc(func(context.Context, ent.Mutation) (ent.Value, error) {
			return nil, err
		})
	}
}

// Reject returns a hook that rejects all operations that match op.
//
//	func (T) Hooks() []ent.Hook {
//		return []ent.Hook{
//			Reject(ent.Delete|ent.Update),
//		}
//	}
func Reject(op ent.Op) ent.Hook {
	hk := FixedError(fmt.Errorf("%s operation is not allowed", op))
	return On(hk, op)
}

// Chain acts as a list of hooks and is effectively immutable.
// Once created, it will always hold the same set of hooks in the same order.
type Chain struct {
	hooks []ent.Hook
}

// NewChain creates a new chain of hooks.
+func NewChain(hooks ...ent.Hook) Chain { + return Chain{append([]ent.Hook(nil), hooks...)} +} + +// Hook chains the list of hooks and returns the final hook. +func (c Chain) Hook() ent.Hook { + return func(mutator ent.Mutator) ent.Mutator { + for i := len(c.hooks) - 1; i >= 0; i-- { + mutator = c.hooks[i](mutator) + } + return mutator + } +} + +// Append extends a chain, adding the specified hook +// as the last ones in the mutation flow. +func (c Chain) Append(hooks ...ent.Hook) Chain { + newHooks := make([]ent.Hook, 0, len(c.hooks)+len(hooks)) + newHooks = append(newHooks, c.hooks...) + newHooks = append(newHooks, hooks...) + return Chain{newHooks} +} + +// Extend extends a chain, adding the specified chain +// as the last ones in the mutation flow. +func (c Chain) Extend(chain Chain) Chain { + return c.Append(chain.hooks...) +} diff --git a/backend/ent/migrate/migrate.go b/backend/ent/migrate/migrate.go new file mode 100644 index 0000000..9bdaf52 --- /dev/null +++ b/backend/ent/migrate/migrate.go @@ -0,0 +1,71 @@ +// Code generated by entc, DO NOT EDIT. + +package migrate + +import ( + "context" + "fmt" + "io" + + "entgo.io/ent/dialect" + "entgo.io/ent/dialect/sql/schema" +) + +var ( + // WithGlobalUniqueID sets the universal ids options to the migration. + // If this option is enabled, ent migration will allocate a 1<<32 range + // for the ids of each entity (table). + // Note that this option cannot be applied on tables that already exist. + WithGlobalUniqueID = schema.WithGlobalUniqueID + // WithDropColumn sets the drop column option to the migration. + // If this option is enabled, ent migration will drop old columns + // that were used for both fields and edges. This defaults to false. + WithDropColumn = schema.WithDropColumn + // WithDropIndex sets the drop index option to the migration. + // If this option is enabled, ent migration will drop old indexes + // that were defined in the schema. This defaults to false. 
+ // Note that unique constraints are defined using `UNIQUE INDEX`, + // and therefore, it's recommended to enable this option to get more + // flexibility in the schema changes. + WithDropIndex = schema.WithDropIndex + // WithFixture sets the foreign-key renaming option to the migration when upgrading + // ent from v0.1.0 (issue-#285). Defaults to false. + WithFixture = schema.WithFixture + // WithForeignKeys enables creating foreign-key in schema DDL. This defaults to true. + WithForeignKeys = schema.WithForeignKeys +) + +// Schema is the API for creating, migrating and dropping a schema. +type Schema struct { + drv dialect.Driver +} + +// NewSchema creates a new schema client. +func NewSchema(drv dialect.Driver) *Schema { return &Schema{drv: drv} } + +// Create creates all schema resources. +func (s *Schema) Create(ctx context.Context, opts ...schema.MigrateOption) error { + migrate, err := schema.NewMigrate(s.drv, opts...) + if err != nil { + return fmt.Errorf("ent/migrate: %w", err) + } + return migrate.Create(ctx, Tables...) +} + +// WriteTo writes the schema changes to w instead of running them against the database. +// +// if err := client.Schema.WriteTo(context.Background(), os.Stdout); err != nil { +// log.Fatal(err) +// } +// +func (s *Schema) WriteTo(ctx context.Context, w io.Writer, opts ...schema.MigrateOption) error { + drv := &schema.WriteDriver{ + Writer: w, + Driver: s.drv, + } + migrate, err := schema.NewMigrate(drv, opts...) + if err != nil { + return fmt.Errorf("ent/migrate: %w", err) + } + return migrate.Create(ctx, Tables...) +} diff --git a/backend/ent/migrate/schema.go b/backend/ent/migrate/schema.go new file mode 100644 index 0000000..203f01c --- /dev/null +++ b/backend/ent/migrate/schema.go @@ -0,0 +1,63 @@ +// Code generated by entc, DO NOT EDIT. + +package migrate + +import ( + "entgo.io/ent/dialect/sql/schema" + "entgo.io/ent/schema/field" +) + +var ( + // AuthTokensColumns holds the columns for the "auth_tokens" table. 
+ AuthTokensColumns = []*schema.Column{ + {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "token", Type: field.TypeBytes, Unique: true}, + {Name: "expires_at", Type: field.TypeTime}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "user_auth_tokens", Type: field.TypeUUID, Nullable: true}, + } + // AuthTokensTable holds the schema information for the "auth_tokens" table. + AuthTokensTable = &schema.Table{ + Name: "auth_tokens", + Columns: AuthTokensColumns, + PrimaryKey: []*schema.Column{AuthTokensColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "auth_tokens_users_auth_tokens", + Columns: []*schema.Column{AuthTokensColumns[4]}, + RefColumns: []*schema.Column{UsersColumns[0]}, + OnDelete: schema.SetNull, + }, + }, + Indexes: []*schema.Index{ + { + Name: "authtokens_token", + Unique: false, + Columns: []*schema.Column{AuthTokensColumns[1]}, + }, + }, + } + // UsersColumns holds the columns for the "users" table. + UsersColumns = []*schema.Column{ + {Name: "id", Type: field.TypeUUID}, + {Name: "name", Type: field.TypeString}, + {Name: "email", Type: field.TypeString, Unique: true}, + {Name: "password", Type: field.TypeString}, + {Name: "is_superuser", Type: field.TypeBool, Default: false}, + } + // UsersTable holds the schema information for the "users" table. + UsersTable = &schema.Table{ + Name: "users", + Columns: UsersColumns, + PrimaryKey: []*schema.Column{UsersColumns[0]}, + } + // Tables holds all the tables in the schema. + Tables = []*schema.Table{ + AuthTokensTable, + UsersTable, + } +) + +func init() { + AuthTokensTable.ForeignKeys[0].RefTable = UsersTable +} diff --git a/backend/ent/mutation.go b/backend/ent/mutation.go new file mode 100644 index 0000000..3705cb4 --- /dev/null +++ b/backend/ent/mutation.go @@ -0,0 +1,1091 @@ +// Code generated by entc, DO NOT EDIT. 
+ +package ent + +import ( + "context" + "errors" + "fmt" + "sync" + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" + + "entgo.io/ent" +) + +const ( + // Operation types. + OpCreate = ent.OpCreate + OpDelete = ent.OpDelete + OpDeleteOne = ent.OpDeleteOne + OpUpdate = ent.OpUpdate + OpUpdateOne = ent.OpUpdateOne + + // Node types. + TypeAuthTokens = "AuthTokens" + TypeUser = "User" +) + +// AuthTokensMutation represents an operation that mutates the AuthTokens nodes in the graph. +type AuthTokensMutation struct { + config + op Op + typ string + id *int + token *[]byte + expires_at *time.Time + created_at *time.Time + clearedFields map[string]struct{} + user *uuid.UUID + cleareduser bool + done bool + oldValue func(context.Context) (*AuthTokens, error) + predicates []predicate.AuthTokens +} + +var _ ent.Mutation = (*AuthTokensMutation)(nil) + +// authtokensOption allows management of the mutation configuration using functional options. +type authtokensOption func(*AuthTokensMutation) + +// newAuthTokensMutation creates new mutation for the AuthTokens entity. +func newAuthTokensMutation(c config, op Op, opts ...authtokensOption) *AuthTokensMutation { + m := &AuthTokensMutation{ + config: c, + op: op, + typ: TypeAuthTokens, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withAuthTokensID sets the ID field of the mutation. 
+func withAuthTokensID(id int) authtokensOption { + return func(m *AuthTokensMutation) { + var ( + err error + once sync.Once + value *AuthTokens + ) + m.oldValue = func(ctx context.Context) (*AuthTokens, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().AuthTokens.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withAuthTokens sets the old AuthTokens of the mutation. +func withAuthTokens(node *AuthTokens) authtokensOption { + return func(m *AuthTokensMutation) { + m.oldValue = func(context.Context) (*AuthTokens, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m AuthTokensMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m AuthTokensMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *AuthTokensMutation) ID() (id int, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. 
+func (m *AuthTokensMutation) IDs(ctx context.Context) ([]int, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []int{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().AuthTokens.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetToken sets the "token" field. +func (m *AuthTokensMutation) SetToken(b []byte) { + m.token = &b +} + +// Token returns the value of the "token" field in the mutation. +func (m *AuthTokensMutation) Token() (r []byte, exists bool) { + v := m.token + if v == nil { + return + } + return *v, true +} + +// OldToken returns the old "token" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *AuthTokensMutation) OldToken(ctx context.Context) (v []byte, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldToken is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldToken requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldToken: %w", err) + } + return oldValue.Token, nil +} + +// ResetToken resets all changes to the "token" field. +func (m *AuthTokensMutation) ResetToken() { + m.token = nil +} + +// SetExpiresAt sets the "expires_at" field. +func (m *AuthTokensMutation) SetExpiresAt(t time.Time) { + m.expires_at = &t +} + +// ExpiresAt returns the value of the "expires_at" field in the mutation. 
+func (m *AuthTokensMutation) ExpiresAt() (r time.Time, exists bool) { + v := m.expires_at + if v == nil { + return + } + return *v, true +} + +// OldExpiresAt returns the old "expires_at" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *AuthTokensMutation) OldExpiresAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldExpiresAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldExpiresAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldExpiresAt: %w", err) + } + return oldValue.ExpiresAt, nil +} + +// ResetExpiresAt resets all changes to the "expires_at" field. +func (m *AuthTokensMutation) ResetExpiresAt() { + m.expires_at = nil +} + +// SetCreatedAt sets the "created_at" field. +func (m *AuthTokensMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *AuthTokensMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the AuthTokens entity. +// If the AuthTokens object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *AuthTokensMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *AuthTokensMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUserID sets the "user" edge to the User entity by id. +func (m *AuthTokensMutation) SetUserID(id uuid.UUID) { + m.user = &id +} + +// ClearUser clears the "user" edge to the User entity. +func (m *AuthTokensMutation) ClearUser() { + m.cleareduser = true +} + +// UserCleared reports if the "user" edge to the User entity was cleared. +func (m *AuthTokensMutation) UserCleared() bool { + return m.cleareduser +} + +// UserID returns the "user" edge ID in the mutation. +func (m *AuthTokensMutation) UserID() (id uuid.UUID, exists bool) { + if m.user != nil { + return *m.user, true + } + return +} + +// UserIDs returns the "user" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// UserID instead. It exists only for internal usage by the builders. +func (m *AuthTokensMutation) UserIDs() (ids []uuid.UUID) { + if id := m.user; id != nil { + ids = append(ids, *id) + } + return +} + +// ResetUser resets all changes to the "user" edge. +func (m *AuthTokensMutation) ResetUser() { + m.user = nil + m.cleareduser = false +} + +// Where appends a list predicates to the AuthTokensMutation builder. +func (m *AuthTokensMutation) Where(ps ...predicate.AuthTokens) { + m.predicates = append(m.predicates, ps...) +} + +// Op returns the operation name. 
+func (m *AuthTokensMutation) Op() Op { + return m.op +} + +// Type returns the node type of this mutation (AuthTokens). +func (m *AuthTokensMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). +func (m *AuthTokensMutation) Fields() []string { + fields := make([]string, 0, 3) + if m.token != nil { + fields = append(fields, authtokens.FieldToken) + } + if m.expires_at != nil { + fields = append(fields, authtokens.FieldExpiresAt) + } + if m.created_at != nil { + fields = append(fields, authtokens.FieldCreatedAt) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *AuthTokensMutation) Field(name string) (ent.Value, bool) { + switch name { + case authtokens.FieldToken: + return m.Token() + case authtokens.FieldExpiresAt: + return m.ExpiresAt() + case authtokens.FieldCreatedAt: + return m.CreatedAt() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *AuthTokensMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case authtokens.FieldToken: + return m.OldToken(ctx) + case authtokens.FieldExpiresAt: + return m.OldExpiresAt(ctx) + case authtokens.FieldCreatedAt: + return m.OldCreatedAt(ctx) + } + return nil, fmt.Errorf("unknown AuthTokens field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *AuthTokensMutation) SetField(name string, value ent.Value) error { + switch name { + case authtokens.FieldToken: + v, ok := value.([]byte) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetToken(v) + return nil + case authtokens.FieldExpiresAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetExpiresAt(v) + return nil + case authtokens.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + } + return fmt.Errorf("unknown AuthTokens field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *AuthTokensMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *AuthTokensMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *AuthTokensMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown AuthTokens numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *AuthTokensMutation) ClearedFields() []string { + return nil +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *AuthTokensMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. 
It returns an +// error if the field is not defined in the schema. +func (m *AuthTokensMutation) ClearField(name string) error { + return fmt.Errorf("unknown AuthTokens nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *AuthTokensMutation) ResetField(name string) error { + switch name { + case authtokens.FieldToken: + m.ResetToken() + return nil + case authtokens.FieldExpiresAt: + m.ResetExpiresAt() + return nil + case authtokens.FieldCreatedAt: + m.ResetCreatedAt() + return nil + } + return fmt.Errorf("unknown AuthTokens field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *AuthTokensMutation) AddedEdges() []string { + edges := make([]string, 0, 1) + if m.user != nil { + edges = append(edges, authtokens.EdgeUser) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *AuthTokensMutation) AddedIDs(name string) []ent.Value { + switch name { + case authtokens.EdgeUser: + if id := m.user; id != nil { + return []ent.Value{*id} + } + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *AuthTokensMutation) RemovedEdges() []string { + edges := make([]string, 0, 1) + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *AuthTokensMutation) RemovedIDs(name string) []ent.Value { + switch name { + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. 
+func (m *AuthTokensMutation) ClearedEdges() []string { + edges := make([]string, 0, 1) + if m.cleareduser { + edges = append(edges, authtokens.EdgeUser) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *AuthTokensMutation) EdgeCleared(name string) bool { + switch name { + case authtokens.EdgeUser: + return m.cleareduser + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *AuthTokensMutation) ClearEdge(name string) error { + switch name { + case authtokens.EdgeUser: + m.ClearUser() + return nil + } + return fmt.Errorf("unknown AuthTokens unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *AuthTokensMutation) ResetEdge(name string) error { + switch name { + case authtokens.EdgeUser: + m.ResetUser() + return nil + } + return fmt.Errorf("unknown AuthTokens edge %s", name) +} + +// UserMutation represents an operation that mutates the User nodes in the graph. +type UserMutation struct { + config + op Op + typ string + id *uuid.UUID + name *string + email *string + password *string + is_superuser *bool + clearedFields map[string]struct{} + auth_tokens map[int]struct{} + removedauth_tokens map[int]struct{} + clearedauth_tokens bool + done bool + oldValue func(context.Context) (*User, error) + predicates []predicate.User +} + +var _ ent.Mutation = (*UserMutation)(nil) + +// userOption allows management of the mutation configuration using functional options. +type userOption func(*UserMutation) + +// newUserMutation creates new mutation for the User entity. 
+func newUserMutation(c config, op Op, opts ...userOption) *UserMutation { + m := &UserMutation{ + config: c, + op: op, + typ: TypeUser, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withUserID sets the ID field of the mutation. +func withUserID(id uuid.UUID) userOption { + return func(m *UserMutation) { + var ( + err error + once sync.Once + value *User + ) + m.oldValue = func(ctx context.Context) (*User, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().User.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withUser sets the old User of the mutation. +func withUser(node *User) userOption { + return func(m *UserMutation) { + m.oldValue = func(context.Context) (*User, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m UserMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m UserMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// SetID sets the value of the id field. Note that this +// operation is only accepted on creation of User entities. +func (m *UserMutation) SetID(id uuid.UUID) { + m.id = &id +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. 
+func (m *UserMutation) ID() (id uuid.UUID, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. +func (m *UserMutation) IDs(ctx context.Context) ([]uuid.UUID, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []uuid.UUID{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().User.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetName sets the "name" field. +func (m *UserMutation) SetName(s string) { + m.name = &s +} + +// Name returns the value of the "name" field in the mutation. +func (m *UserMutation) Name() (r string, exists bool) { + v := m.name + if v == nil { + return + } + return *v, true +} + +// OldName returns the old "name" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldName(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldName is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldName requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldName: %w", err) + } + return oldValue.Name, nil +} + +// ResetName resets all changes to the "name" field. +func (m *UserMutation) ResetName() { + m.name = nil +} + +// SetEmail sets the "email" field. 
+func (m *UserMutation) SetEmail(s string) { + m.email = &s +} + +// Email returns the value of the "email" field in the mutation. +func (m *UserMutation) Email() (r string, exists bool) { + v := m.email + if v == nil { + return + } + return *v, true +} + +// OldEmail returns the old "email" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldEmail(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldEmail is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldEmail requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldEmail: %w", err) + } + return oldValue.Email, nil +} + +// ResetEmail resets all changes to the "email" field. +func (m *UserMutation) ResetEmail() { + m.email = nil +} + +// SetPassword sets the "password" field. +func (m *UserMutation) SetPassword(s string) { + m.password = &s +} + +// Password returns the value of the "password" field in the mutation. +func (m *UserMutation) Password() (r string, exists bool) { + v := m.password + if v == nil { + return + } + return *v, true +} + +// OldPassword returns the old "password" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *UserMutation) OldPassword(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPassword is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPassword requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPassword: %w", err) + } + return oldValue.Password, nil +} + +// ResetPassword resets all changes to the "password" field. +func (m *UserMutation) ResetPassword() { + m.password = nil +} + +// SetIsSuperuser sets the "is_superuser" field. +func (m *UserMutation) SetIsSuperuser(b bool) { + m.is_superuser = &b +} + +// IsSuperuser returns the value of the "is_superuser" field in the mutation. +func (m *UserMutation) IsSuperuser() (r bool, exists bool) { + v := m.is_superuser + if v == nil { + return + } + return *v, true +} + +// OldIsSuperuser returns the old "is_superuser" field's value of the User entity. +// If the User object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *UserMutation) OldIsSuperuser(ctx context.Context) (v bool, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldIsSuperuser is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldIsSuperuser requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldIsSuperuser: %w", err) + } + return oldValue.IsSuperuser, nil +} + +// ResetIsSuperuser resets all changes to the "is_superuser" field. +func (m *UserMutation) ResetIsSuperuser() { + m.is_superuser = nil +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by ids. 
+func (m *UserMutation) AddAuthTokenIDs(ids ...int) { + if m.auth_tokens == nil { + m.auth_tokens = make(map[int]struct{}) + } + for i := range ids { + m.auth_tokens[ids[i]] = struct{}{} + } +} + +// ClearAuthTokens clears the "auth_tokens" edge to the AuthTokens entity. +func (m *UserMutation) ClearAuthTokens() { + m.clearedauth_tokens = true +} + +// AuthTokensCleared reports if the "auth_tokens" edge to the AuthTokens entity was cleared. +func (m *UserMutation) AuthTokensCleared() bool { + return m.clearedauth_tokens +} + +// RemoveAuthTokenIDs removes the "auth_tokens" edge to the AuthTokens entity by IDs. +func (m *UserMutation) RemoveAuthTokenIDs(ids ...int) { + if m.removedauth_tokens == nil { + m.removedauth_tokens = make(map[int]struct{}) + } + for i := range ids { + delete(m.auth_tokens, ids[i]) + m.removedauth_tokens[ids[i]] = struct{}{} + } +} + +// RemovedAuthTokens returns the removed IDs of the "auth_tokens" edge to the AuthTokens entity. +func (m *UserMutation) RemovedAuthTokensIDs() (ids []int) { + for id := range m.removedauth_tokens { + ids = append(ids, id) + } + return +} + +// AuthTokensIDs returns the "auth_tokens" edge IDs in the mutation. +func (m *UserMutation) AuthTokensIDs() (ids []int) { + for id := range m.auth_tokens { + ids = append(ids, id) + } + return +} + +// ResetAuthTokens resets all changes to the "auth_tokens" edge. +func (m *UserMutation) ResetAuthTokens() { + m.auth_tokens = nil + m.clearedauth_tokens = false + m.removedauth_tokens = nil +} + +// Where appends a list predicates to the UserMutation builder. +func (m *UserMutation) Where(ps ...predicate.User) { + m.predicates = append(m.predicates, ps...) +} + +// Op returns the operation name. +func (m *UserMutation) Op() Op { + return m.op +} + +// Type returns the node type of this mutation (User). +func (m *UserMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. 
Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). +func (m *UserMutation) Fields() []string { + fields := make([]string, 0, 4) + if m.name != nil { + fields = append(fields, user.FieldName) + } + if m.email != nil { + fields = append(fields, user.FieldEmail) + } + if m.password != nil { + fields = append(fields, user.FieldPassword) + } + if m.is_superuser != nil { + fields = append(fields, user.FieldIsSuperuser) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *UserMutation) Field(name string) (ent.Value, bool) { + switch name { + case user.FieldName: + return m.Name() + case user.FieldEmail: + return m.Email() + case user.FieldPassword: + return m.Password() + case user.FieldIsSuperuser: + return m.IsSuperuser() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *UserMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case user.FieldName: + return m.OldName(ctx) + case user.FieldEmail: + return m.OldEmail(ctx) + case user.FieldPassword: + return m.OldPassword(ctx) + case user.FieldIsSuperuser: + return m.OldIsSuperuser(ctx) + } + return nil, fmt.Errorf("unknown User field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *UserMutation) SetField(name string, value ent.Value) error { + switch name { + case user.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case user.FieldEmail: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetEmail(v) + return nil + case user.FieldPassword: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPassword(v) + return nil + case user.FieldIsSuperuser: + v, ok := value.(bool) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetIsSuperuser(v) + return nil + } + return fmt.Errorf("unknown User field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *UserMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *UserMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *UserMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown User numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *UserMutation) ClearedFields() []string { + return nil +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. 
+func (m *UserMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *UserMutation) ClearField(name string) error { + return fmt.Errorf("unknown User nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *UserMutation) ResetField(name string) error { + switch name { + case user.FieldName: + m.ResetName() + return nil + case user.FieldEmail: + m.ResetEmail() + return nil + case user.FieldPassword: + m.ResetPassword() + return nil + case user.FieldIsSuperuser: + m.ResetIsSuperuser() + return nil + } + return fmt.Errorf("unknown User field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *UserMutation) AddedEdges() []string { + edges := make([]string, 0, 1) + if m.auth_tokens != nil { + edges = append(edges, user.EdgeAuthTokens) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *UserMutation) AddedIDs(name string) []ent.Value { + switch name { + case user.EdgeAuthTokens: + ids := make([]ent.Value, 0, len(m.auth_tokens)) + for id := range m.auth_tokens { + ids = append(ids, id) + } + return ids + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *UserMutation) RemovedEdges() []string { + edges := make([]string, 0, 1) + if m.removedauth_tokens != nil { + edges = append(edges, user.EdgeAuthTokens) + } + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. 
+func (m *UserMutation) RemovedIDs(name string) []ent.Value { + switch name { + case user.EdgeAuthTokens: + ids := make([]ent.Value, 0, len(m.removedauth_tokens)) + for id := range m.removedauth_tokens { + ids = append(ids, id) + } + return ids + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *UserMutation) ClearedEdges() []string { + edges := make([]string, 0, 1) + if m.clearedauth_tokens { + edges = append(edges, user.EdgeAuthTokens) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *UserMutation) EdgeCleared(name string) bool { + switch name { + case user.EdgeAuthTokens: + return m.clearedauth_tokens + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *UserMutation) ClearEdge(name string) error { + switch name { + } + return fmt.Errorf("unknown User unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *UserMutation) ResetEdge(name string) error { + switch name { + case user.EdgeAuthTokens: + m.ResetAuthTokens() + return nil + } + return fmt.Errorf("unknown User edge %s", name) +} diff --git a/backend/ent/predicate/predicate.go b/backend/ent/predicate/predicate.go new file mode 100644 index 0000000..b26324f --- /dev/null +++ b/backend/ent/predicate/predicate.go @@ -0,0 +1,13 @@ +// Code generated by entc, DO NOT EDIT. + +package predicate + +import ( + "entgo.io/ent/dialect/sql" +) + +// AuthTokens is the predicate function for authtokens builders. +type AuthTokens func(*sql.Selector) + +// User is the predicate function for user builders. 
+type User func(*sql.Selector) diff --git a/backend/ent/runtime.go b/backend/ent/runtime.go new file mode 100644 index 0000000..828477f --- /dev/null +++ b/backend/ent/runtime.go @@ -0,0 +1,50 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/schema" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// The init function reads all schema descriptors with runtime code +// (default values, validators, hooks and policies) and stitches it +// to their package variables. +func init() { + authtokensFields := schema.AuthTokens{}.Fields() + _ = authtokensFields + // authtokensDescExpiresAt is the schema descriptor for expires_at field. + authtokensDescExpiresAt := authtokensFields[1].Descriptor() + // authtokens.DefaultExpiresAt holds the default value on creation for the expires_at field. + authtokens.DefaultExpiresAt = authtokensDescExpiresAt.Default.(func() time.Time) + // authtokensDescCreatedAt is the schema descriptor for created_at field. + authtokensDescCreatedAt := authtokensFields[2].Descriptor() + // authtokens.DefaultCreatedAt holds the default value on creation for the created_at field. + authtokens.DefaultCreatedAt = authtokensDescCreatedAt.Default.(func() time.Time) + userFields := schema.User{}.Fields() + _ = userFields + // userDescName is the schema descriptor for name field. + userDescName := userFields[1].Descriptor() + // user.NameValidator is a validator for the "name" field. It is called by the builders before save. + user.NameValidator = userDescName.Validators[0].(func(string) error) + // userDescEmail is the schema descriptor for email field. + userDescEmail := userFields[2].Descriptor() + // user.EmailValidator is a validator for the "email" field. It is called by the builders before save. 
+ user.EmailValidator = userDescEmail.Validators[0].(func(string) error) + // userDescPassword is the schema descriptor for password field. + userDescPassword := userFields[3].Descriptor() + // user.PasswordValidator is a validator for the "password" field. It is called by the builders before save. + user.PasswordValidator = userDescPassword.Validators[0].(func(string) error) + // userDescIsSuperuser is the schema descriptor for is_superuser field. + userDescIsSuperuser := userFields[4].Descriptor() + // user.DefaultIsSuperuser holds the default value on creation for the is_superuser field. + user.DefaultIsSuperuser = userDescIsSuperuser.Default.(bool) + // userDescID is the schema descriptor for id field. + userDescID := userFields[0].Descriptor() + // user.DefaultID holds the default value on creation for the id field. + user.DefaultID = userDescID.Default.(func() uuid.UUID) +} diff --git a/backend/ent/runtime/runtime.go b/backend/ent/runtime/runtime.go new file mode 100644 index 0000000..31da890 --- /dev/null +++ b/backend/ent/runtime/runtime.go @@ -0,0 +1,10 @@ +// Code generated by entc, DO NOT EDIT. + +package runtime + +// The schema-stitching logic is generated in github.com/hay-kot/git-web-template/backend/ent/runtime.go + +const ( + Version = "v0.10.0" // Version of ent codegen. + Sum = "h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo=" // Sum of ent codegen. +) diff --git a/backend/ent/schema/authtokens.go b/backend/ent/schema/authtokens.go new file mode 100644 index 0000000..fbe9fd1 --- /dev/null +++ b/backend/ent/schema/authtokens.go @@ -0,0 +1,43 @@ +package schema + +import ( + "time" + + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "entgo.io/ent/schema/index" +) + +// AuthTokens holds the schema definition for the AuthTokens entity. +type AuthTokens struct { + ent.Schema +} + +// Fields of the AuthTokens. +func (AuthTokens) Fields() []ent.Field { + return []ent.Field{ + field.Bytes("token"). 
+ Unique(), + field.Time("expires_at"). + Default(func() time.Time { return time.Now().Add(time.Hour * 24 * 7) }), + field.Time("created_at"). + Default(time.Now), + } +} + +// Edges of the AuthTokens. +func (AuthTokens) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("user", User.Type). + Ref("auth_tokens"). + Unique(), + } +} + +func (AuthTokens) Indexes() []ent.Index { + return []ent.Index{ + // non-unique index. + index.Fields("token"), + } +} diff --git a/backend/ent/schema/user.go b/backend/ent/schema/user.go new file mode 100644 index 0000000..d1fb726 --- /dev/null +++ b/backend/ent/schema/user.go @@ -0,0 +1,38 @@ +package schema + +import ( + "entgo.io/ent" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "github.com/google/uuid" +) + +// User holds the schema definition for the User entity. +type User struct { + ent.Schema +} + +// Fields of the User. +func (User) Fields() []ent.Field { + return []ent.Field{ + field.UUID("id", uuid.UUID{}). + Default(uuid.New), + field.String("name"). + NotEmpty(), + field.String("email"). + NotEmpty(). + Unique(), + field.String("password"). + NotEmpty(). + Sensitive(), + field.Bool("is_superuser"). + Default(false), + } +} + +// Edges of the User. +func (User) Edges() []ent.Edge { + return []ent.Edge{ + edge.To("auth_tokens", AuthTokens.Type), + } +} diff --git a/backend/ent/tx.go b/backend/ent/tx.go new file mode 100644 index 0000000..b93d068 --- /dev/null +++ b/backend/ent/tx.go @@ -0,0 +1,213 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "sync" + + "entgo.io/ent/dialect" +) + +// Tx is a transactional client that is created by calling Client.Tx(). +type Tx struct { + config + // AuthTokens is the client for interacting with the AuthTokens builders. + AuthTokens *AuthTokensClient + // User is the client for interacting with the User builders. + User *UserClient + + // lazily loaded. + client *Client + clientOnce sync.Once + + // completion callbacks. 
+ mu sync.Mutex + onCommit []CommitHook + onRollback []RollbackHook + + // ctx lives for the life of the transaction. It is + // the same context used by the underlying connection. + ctx context.Context +} + +type ( + // Committer is the interface that wraps the Commit method. + Committer interface { + Commit(context.Context, *Tx) error + } + + // The CommitFunc type is an adapter to allow the use of ordinary + // function as a Committer. If f is a function with the appropriate + // signature, CommitFunc(f) is a Committer that calls f. + CommitFunc func(context.Context, *Tx) error + + // CommitHook defines the "commit middleware". A function that gets a Committer + // and returns a Committer. For example: + // + // hook := func(next ent.Committer) ent.Committer { + // return ent.CommitFunc(func(ctx context.Context, tx *ent.Tx) error { + // // Do some stuff before. + // if err := next.Commit(ctx, tx); err != nil { + // return err + // } + // // Do some stuff after. + // return nil + // }) + // } + // + CommitHook func(Committer) Committer +) + +// Commit calls f(ctx, m). +func (f CommitFunc) Commit(ctx context.Context, tx *Tx) error { + return f(ctx, tx) +} + +// Commit commits the transaction. +func (tx *Tx) Commit() error { + txDriver := tx.config.driver.(*txDriver) + var fn Committer = CommitFunc(func(context.Context, *Tx) error { + return txDriver.tx.Commit() + }) + tx.mu.Lock() + hooks := append([]CommitHook(nil), tx.onCommit...) + tx.mu.Unlock() + for i := len(hooks) - 1; i >= 0; i-- { + fn = hooks[i](fn) + } + return fn.Commit(tx.ctx, tx) +} + +// OnCommit adds a hook to call on commit. +func (tx *Tx) OnCommit(f CommitHook) { + tx.mu.Lock() + defer tx.mu.Unlock() + tx.onCommit = append(tx.onCommit, f) +} + +type ( + // Rollbacker is the interface that wraps the Rollback method. + Rollbacker interface { + Rollback(context.Context, *Tx) error + } + + // The RollbackFunc type is an adapter to allow the use of ordinary + // function as a Rollbacker. 
If f is a function with the appropriate + // signature, RollbackFunc(f) is a Rollbacker that calls f. + RollbackFunc func(context.Context, *Tx) error + + // RollbackHook defines the "rollback middleware". A function that gets a Rollbacker + // and returns a Rollbacker. For example: + // + // hook := func(next ent.Rollbacker) ent.Rollbacker { + // return ent.RollbackFunc(func(ctx context.Context, tx *ent.Tx) error { + // // Do some stuff before. + // if err := next.Rollback(ctx, tx); err != nil { + // return err + // } + // // Do some stuff after. + // return nil + // }) + // } + // + RollbackHook func(Rollbacker) Rollbacker +) + +// Rollback calls f(ctx, m). +func (f RollbackFunc) Rollback(ctx context.Context, tx *Tx) error { + return f(ctx, tx) +} + +// Rollback rollbacks the transaction. +func (tx *Tx) Rollback() error { + txDriver := tx.config.driver.(*txDriver) + var fn Rollbacker = RollbackFunc(func(context.Context, *Tx) error { + return txDriver.tx.Rollback() + }) + tx.mu.Lock() + hooks := append([]RollbackHook(nil), tx.onRollback...) + tx.mu.Unlock() + for i := len(hooks) - 1; i >= 0; i-- { + fn = hooks[i](fn) + } + return fn.Rollback(tx.ctx, tx) +} + +// OnRollback adds a hook to call on rollback. +func (tx *Tx) OnRollback(f RollbackHook) { + tx.mu.Lock() + defer tx.mu.Unlock() + tx.onRollback = append(tx.onRollback, f) +} + +// Client returns a Client that binds to current transaction. +func (tx *Tx) Client() *Client { + tx.clientOnce.Do(func() { + tx.client = &Client{config: tx.config} + tx.client.init() + }) + return tx.client +} + +func (tx *Tx) init() { + tx.AuthTokens = NewAuthTokensClient(tx.config) + tx.User = NewUserClient(tx.config) +} + +// txDriver wraps the given dialect.Tx with a nop dialect.Driver implementation. +// The idea is to support transactions without adding any extra code to the builders. +// When a builder calls to driver.Tx(), it gets the same dialect.Tx instance. 
+// Commit and Rollback are nop for the internal builders and the user must call one +// of them in order to commit or rollback the transaction. +// +// If a closed transaction is embedded in one of the generated entities, and the entity +// applies a query, for example: AuthTokens.QueryXXX(), the query will be executed +// through the driver which created this transaction. +// +// Note that txDriver is not goroutine safe. +type txDriver struct { + // the driver we started the transaction from. + drv dialect.Driver + // tx is the underlying transaction. + tx dialect.Tx +} + +// newTx creates a new transactional driver. +func newTx(ctx context.Context, drv dialect.Driver) (*txDriver, error) { + tx, err := drv.Tx(ctx) + if err != nil { + return nil, err + } + return &txDriver{tx: tx, drv: drv}, nil +} + +// Tx returns the transaction wrapper (txDriver) to avoid Commit or Rollback calls +// from the internal builders. Should be called only by the internal builders. +func (tx *txDriver) Tx(context.Context) (dialect.Tx, error) { return tx, nil } + +// Dialect returns the dialect of the driver we started the transaction from. +func (tx *txDriver) Dialect() string { return tx.drv.Dialect() } + +// Close is a nop close. +func (*txDriver) Close() error { return nil } + +// Commit is a nop commit for the internal builders. +// User must call `Tx.Commit` in order to commit the transaction. +func (*txDriver) Commit() error { return nil } + +// Rollback is a nop rollback for the internal builders. +// User must call `Tx.Rollback` in order to rollback the transaction. +func (*txDriver) Rollback() error { return nil } + +// Exec calls tx.Exec. +func (tx *txDriver) Exec(ctx context.Context, query string, args, v interface{}) error { + return tx.tx.Exec(ctx, query, args, v) +} + +// Query calls tx.Query. 
+func (tx *txDriver) Query(ctx context.Context, query string, args, v interface{}) error { + return tx.tx.Query(ctx, query, args, v) +} + +var _ dialect.Driver = (*txDriver)(nil) diff --git a/backend/ent/user.go b/backend/ent/user.go new file mode 100644 index 0000000..62eaf8f --- /dev/null +++ b/backend/ent/user.go @@ -0,0 +1,157 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + + "entgo.io/ent/dialect/sql" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// User is the model entity for the User schema. +type User struct { + config `json:"-"` + // ID of the ent. + ID uuid.UUID `json:"id,omitempty"` + // Name holds the value of the "name" field. + Name string `json:"name,omitempty"` + // Email holds the value of the "email" field. + Email string `json:"email,omitempty"` + // Password holds the value of the "password" field. + Password string `json:"-"` + // IsSuperuser holds the value of the "is_superuser" field. + IsSuperuser bool `json:"is_superuser,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the UserQuery when eager-loading is set. + Edges UserEdges `json:"edges"` +} + +// UserEdges holds the relations/edges for other nodes in the graph. +type UserEdges struct { + // AuthTokens holds the value of the auth_tokens edge. + AuthTokens []*AuthTokens `json:"auth_tokens,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [1]bool +} + +// AuthTokensOrErr returns the AuthTokens value or an error if the edge +// was not loaded in eager-loading. +func (e UserEdges) AuthTokensOrErr() ([]*AuthTokens, error) { + if e.loadedTypes[0] { + return e.AuthTokens, nil + } + return nil, &NotLoadedError{edge: "auth_tokens"} +} + +// scanValues returns the types for scanning values from sql.Rows. 
+func (*User) scanValues(columns []string) ([]interface{}, error) { + values := make([]interface{}, len(columns)) + for i := range columns { + switch columns[i] { + case user.FieldIsSuperuser: + values[i] = new(sql.NullBool) + case user.FieldName, user.FieldEmail, user.FieldPassword: + values[i] = new(sql.NullString) + case user.FieldID: + values[i] = new(uuid.UUID) + default: + return nil, fmt.Errorf("unexpected column %q for type User", columns[i]) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the User fields. +func (u *User) assignValues(columns []string, values []interface{}) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case user.FieldID: + if value, ok := values[i].(*uuid.UUID); !ok { + return fmt.Errorf("unexpected type %T for field id", values[i]) + } else if value != nil { + u.ID = *value + } + case user.FieldName: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field name", values[i]) + } else if value.Valid { + u.Name = value.String + } + case user.FieldEmail: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field email", values[i]) + } else if value.Valid { + u.Email = value.String + } + case user.FieldPassword: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field password", values[i]) + } else if value.Valid { + u.Password = value.String + } + case user.FieldIsSuperuser: + if value, ok := values[i].(*sql.NullBool); !ok { + return fmt.Errorf("unexpected type %T for field is_superuser", values[i]) + } else if value.Valid { + u.IsSuperuser = value.Bool + } + } + } + return nil +} + +// QueryAuthTokens queries the "auth_tokens" edge of the User entity. 
+func (u *User) QueryAuthTokens() *AuthTokensQuery { + return (&UserClient{config: u.config}).QueryAuthTokens(u) +} + +// Update returns a builder for updating this User. +// Note that you need to call User.Unwrap() before calling this method if this User +// was returned from a transaction, and the transaction was committed or rolled back. +func (u *User) Update() *UserUpdateOne { + return (&UserClient{config: u.config}).UpdateOne(u) +} + +// Unwrap unwraps the User entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (u *User) Unwrap() *User { + tx, ok := u.config.driver.(*txDriver) + if !ok { + panic("ent: User is not a transactional entity") + } + u.config.driver = tx.drv + return u +} + +// String implements the fmt.Stringer. +func (u *User) String() string { + var builder strings.Builder + builder.WriteString("User(") + builder.WriteString(fmt.Sprintf("id=%v", u.ID)) + builder.WriteString(", name=") + builder.WriteString(u.Name) + builder.WriteString(", email=") + builder.WriteString(u.Email) + builder.WriteString(", password=") + builder.WriteString(", is_superuser=") + builder.WriteString(fmt.Sprintf("%v", u.IsSuperuser)) + builder.WriteByte(')') + return builder.String() +} + +// Users is a parsable slice of User. +type Users []*User + +func (u Users) config(cfg config) { + for _i := range u { + u[_i].config = cfg + } +} diff --git a/backend/ent/user/user.go b/backend/ent/user/user.go new file mode 100644 index 0000000..9bbbd9a --- /dev/null +++ b/backend/ent/user/user.go @@ -0,0 +1,65 @@ +// Code generated by entc, DO NOT EDIT. + +package user + +import ( + "github.com/google/uuid" +) + +const ( + // Label holds the string label denoting the user type in the database. + Label = "user" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldName holds the string denoting the name field in the database. 
+ FieldName = "name" + // FieldEmail holds the string denoting the email field in the database. + FieldEmail = "email" + // FieldPassword holds the string denoting the password field in the database. + FieldPassword = "password" + // FieldIsSuperuser holds the string denoting the is_superuser field in the database. + FieldIsSuperuser = "is_superuser" + // EdgeAuthTokens holds the string denoting the auth_tokens edge name in mutations. + EdgeAuthTokens = "auth_tokens" + // Table holds the table name of the user in the database. + Table = "users" + // AuthTokensTable is the table that holds the auth_tokens relation/edge. + AuthTokensTable = "auth_tokens" + // AuthTokensInverseTable is the table name for the AuthTokens entity. + // It exists in this package in order to avoid circular dependency with the "authtokens" package. + AuthTokensInverseTable = "auth_tokens" + // AuthTokensColumn is the table column denoting the auth_tokens relation/edge. + AuthTokensColumn = "user_auth_tokens" +) + +// Columns holds all SQL columns for user fields. +var Columns = []string{ + FieldID, + FieldName, + FieldEmail, + FieldPassword, + FieldIsSuperuser, +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + return false +} + +var ( + // NameValidator is a validator for the "name" field. It is called by the builders before save. + NameValidator func(string) error + // EmailValidator is a validator for the "email" field. It is called by the builders before save. + EmailValidator func(string) error + // PasswordValidator is a validator for the "password" field. It is called by the builders before save. + PasswordValidator func(string) error + // DefaultIsSuperuser holds the default value on creation for the "is_superuser" field. + DefaultIsSuperuser bool + // DefaultID holds the default value on creation for the "id" field. 
+ DefaultID func() uuid.UUID +) diff --git a/backend/ent/user/where.go b/backend/ent/user/where.go new file mode 100644 index 0000000..36db52a --- /dev/null +++ b/backend/ent/user/where.go @@ -0,0 +1,528 @@ +// Code generated by entc, DO NOT EDIT. + +package user + +import ( + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldID), id)) + }) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldID), id)) + }) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.In(s.C(FieldID), v...)) + }) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(ids) == 0 { + s.Where(sql.False()) + return + } + v := make([]interface{}, len(ids)) + for i := range v { + v[i] = ids[i] + } + s.Where(sql.NotIn(s.C(FieldID), v...)) + }) +} + +// IDGT applies the GT predicate on the ID field. 
+func IDGT(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldID), id)) + }) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldID), id)) + }) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldID), id)) + }) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id uuid.UUID) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldID), id)) + }) +} + +// Name applies equality check predicate on the "name" field. It's identical to NameEQ. +func Name(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// Email applies equality check predicate on the "email" field. It's identical to EmailEQ. +func Email(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldEmail), v)) + }) +} + +// Password applies equality check predicate on the "password" field. It's identical to PasswordEQ. +func Password(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPassword), v)) + }) +} + +// IsSuperuser applies equality check predicate on the "is_superuser" field. It's identical to IsSuperuserEQ. +func IsSuperuser(v bool) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldIsSuperuser), v)) + }) +} + +// NameEQ applies the EQ predicate on the "name" field. +func NameEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldName), v)) + }) +} + +// NameNEQ applies the NEQ predicate on the "name" field. 
+func NameNEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldName), v)) + }) +} + +// NameIn applies the In predicate on the "name" field. +func NameIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldName), v...)) + }) +} + +// NameNotIn applies the NotIn predicate on the "name" field. +func NameNotIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldName), v...)) + }) +} + +// NameGT applies the GT predicate on the "name" field. +func NameGT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldName), v)) + }) +} + +// NameGTE applies the GTE predicate on the "name" field. +func NameGTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldName), v)) + }) +} + +// NameLT applies the LT predicate on the "name" field. +func NameLT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldName), v)) + }) +} + +// NameLTE applies the LTE predicate on the "name" field. +func NameLTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldName), v)) + }) +} + +// NameContains applies the Contains predicate on the "name" field. 
+func NameContains(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldName), v)) + }) +} + +// NameHasPrefix applies the HasPrefix predicate on the "name" field. +func NameHasPrefix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldName), v)) + }) +} + +// NameHasSuffix applies the HasSuffix predicate on the "name" field. +func NameHasSuffix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldName), v)) + }) +} + +// NameEqualFold applies the EqualFold predicate on the "name" field. +func NameEqualFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldName), v)) + }) +} + +// NameContainsFold applies the ContainsFold predicate on the "name" field. +func NameContainsFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldName), v)) + }) +} + +// EmailEQ applies the EQ predicate on the "email" field. +func EmailEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldEmail), v)) + }) +} + +// EmailNEQ applies the NEQ predicate on the "email" field. +func EmailNEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldEmail), v)) + }) +} + +// EmailIn applies the In predicate on the "email" field. +func EmailIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldEmail), v...)) + }) +} + +// EmailNotIn applies the NotIn predicate on the "email" field. 
+func EmailNotIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldEmail), v...)) + }) +} + +// EmailGT applies the GT predicate on the "email" field. +func EmailGT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldEmail), v)) + }) +} + +// EmailGTE applies the GTE predicate on the "email" field. +func EmailGTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldEmail), v)) + }) +} + +// EmailLT applies the LT predicate on the "email" field. +func EmailLT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldEmail), v)) + }) +} + +// EmailLTE applies the LTE predicate on the "email" field. +func EmailLTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldEmail), v)) + }) +} + +// EmailContains applies the Contains predicate on the "email" field. +func EmailContains(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldEmail), v)) + }) +} + +// EmailHasPrefix applies the HasPrefix predicate on the "email" field. +func EmailHasPrefix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldEmail), v)) + }) +} + +// EmailHasSuffix applies the HasSuffix predicate on the "email" field. +func EmailHasSuffix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldEmail), v)) + }) +} + +// EmailEqualFold applies the EqualFold predicate on the "email" field. 
+func EmailEqualFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldEmail), v)) + }) +} + +// EmailContainsFold applies the ContainsFold predicate on the "email" field. +func EmailContainsFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldEmail), v)) + }) +} + +// PasswordEQ applies the EQ predicate on the "password" field. +func PasswordEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldPassword), v)) + }) +} + +// PasswordNEQ applies the NEQ predicate on the "password" field. +func PasswordNEQ(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldPassword), v)) + }) +} + +// PasswordIn applies the In predicate on the "password" field. +func PasswordIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.In(s.C(FieldPassword), v...)) + }) +} + +// PasswordNotIn applies the NotIn predicate on the "password" field. +func PasswordNotIn(vs ...string) predicate.User { + v := make([]interface{}, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.User(func(s *sql.Selector) { + // if not arguments were provided, append the FALSE constants, + // since we can't apply "IN ()". This will make this predicate falsy. + if len(v) == 0 { + s.Where(sql.False()) + return + } + s.Where(sql.NotIn(s.C(FieldPassword), v...)) + }) +} + +// PasswordGT applies the GT predicate on the "password" field. 
+func PasswordGT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GT(s.C(FieldPassword), v)) + }) +} + +// PasswordGTE applies the GTE predicate on the "password" field. +func PasswordGTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.GTE(s.C(FieldPassword), v)) + }) +} + +// PasswordLT applies the LT predicate on the "password" field. +func PasswordLT(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LT(s.C(FieldPassword), v)) + }) +} + +// PasswordLTE applies the LTE predicate on the "password" field. +func PasswordLTE(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.LTE(s.C(FieldPassword), v)) + }) +} + +// PasswordContains applies the Contains predicate on the "password" field. +func PasswordContains(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.Contains(s.C(FieldPassword), v)) + }) +} + +// PasswordHasPrefix applies the HasPrefix predicate on the "password" field. +func PasswordHasPrefix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasPrefix(s.C(FieldPassword), v)) + }) +} + +// PasswordHasSuffix applies the HasSuffix predicate on the "password" field. +func PasswordHasSuffix(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.HasSuffix(s.C(FieldPassword), v)) + }) +} + +// PasswordEqualFold applies the EqualFold predicate on the "password" field. +func PasswordEqualFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EqualFold(s.C(FieldPassword), v)) + }) +} + +// PasswordContainsFold applies the ContainsFold predicate on the "password" field. 
+func PasswordContainsFold(v string) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.ContainsFold(s.C(FieldPassword), v)) + }) +} + +// IsSuperuserEQ applies the EQ predicate on the "is_superuser" field. +func IsSuperuserEQ(v bool) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldIsSuperuser), v)) + }) +} + +// IsSuperuserNEQ applies the NEQ predicate on the "is_superuser" field. +func IsSuperuserNEQ(v bool) predicate.User { + return predicate.User(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldIsSuperuser), v)) + }) +} + +// HasAuthTokens applies the HasEdge predicate on the "auth_tokens" edge. +func HasAuthTokens() predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(AuthTokensTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, AuthTokensTable, AuthTokensColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasAuthTokensWith applies the HasEdge predicate on the "auth_tokens" edge with a given conditions (other predicates). +func HasAuthTokensWith(preds ...predicate.AuthTokens) predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(AuthTokensInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, AuthTokensTable, AuthTokensColumn), + ) + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.User) predicate.User { + return predicate.User(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for _, p := range predicates { + p(s1) + } + s.Where(s1.P()) + }) +} + +// Or groups predicates with the OR operator between them. 
+func Or(predicates ...predicate.User) predicate.User { + return predicate.User(func(s *sql.Selector) { + s1 := s.Clone().SetP(nil) + for i, p := range predicates { + if i > 0 { + s1.Or() + } + p(s1) + } + s.Where(s1.P()) + }) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.User) predicate.User { + return predicate.User(func(s *sql.Selector) { + p(s.Not()) + }) +} diff --git a/backend/ent/user_create.go b/backend/ent/user_create.go new file mode 100644 index 0000000..95ad932 --- /dev/null +++ b/backend/ent/user_create.go @@ -0,0 +1,363 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserCreate is the builder for creating a User entity. +type UserCreate struct { + config + mutation *UserMutation + hooks []Hook +} + +// SetName sets the "name" field. +func (uc *UserCreate) SetName(s string) *UserCreate { + uc.mutation.SetName(s) + return uc +} + +// SetEmail sets the "email" field. +func (uc *UserCreate) SetEmail(s string) *UserCreate { + uc.mutation.SetEmail(s) + return uc +} + +// SetPassword sets the "password" field. +func (uc *UserCreate) SetPassword(s string) *UserCreate { + uc.mutation.SetPassword(s) + return uc +} + +// SetIsSuperuser sets the "is_superuser" field. +func (uc *UserCreate) SetIsSuperuser(b bool) *UserCreate { + uc.mutation.SetIsSuperuser(b) + return uc +} + +// SetNillableIsSuperuser sets the "is_superuser" field if the given value is not nil. +func (uc *UserCreate) SetNillableIsSuperuser(b *bool) *UserCreate { + if b != nil { + uc.SetIsSuperuser(*b) + } + return uc +} + +// SetID sets the "id" field. 
+func (uc *UserCreate) SetID(u uuid.UUID) *UserCreate { + uc.mutation.SetID(u) + return uc +} + +// SetNillableID sets the "id" field if the given value is not nil. +func (uc *UserCreate) SetNillableID(u *uuid.UUID) *UserCreate { + if u != nil { + uc.SetID(*u) + } + return uc +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. +func (uc *UserCreate) AddAuthTokenIDs(ids ...int) *UserCreate { + uc.mutation.AddAuthTokenIDs(ids...) + return uc +} + +// AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. +func (uc *UserCreate) AddAuthTokens(a ...*AuthTokens) *UserCreate { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uc.AddAuthTokenIDs(ids...) +} + +// Mutation returns the UserMutation object of the builder. +func (uc *UserCreate) Mutation() *UserMutation { + return uc.mutation +} + +// Save creates the User in the database. +func (uc *UserCreate) Save(ctx context.Context) (*User, error) { + var ( + err error + node *User + ) + uc.defaults() + if len(uc.hooks) == 0 { + if err = uc.check(); err != nil { + return nil, err + } + node, err = uc.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = uc.check(); err != nil { + return nil, err + } + uc.mutation = mutation + if node, err = uc.sqlSave(ctx); err != nil { + return nil, err + } + mutation.id = &node.ID + mutation.done = true + return node, err + }) + for i := len(uc.hooks) - 1; i >= 0; i-- { + if uc.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = uc.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, uc.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX calls Save and panics if Save returns an error. 
+func (uc *UserCreate) SaveX(ctx context.Context) *User { + v, err := uc.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (uc *UserCreate) Exec(ctx context.Context) error { + _, err := uc.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (uc *UserCreate) ExecX(ctx context.Context) { + if err := uc.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (uc *UserCreate) defaults() { + if _, ok := uc.mutation.IsSuperuser(); !ok { + v := user.DefaultIsSuperuser + uc.mutation.SetIsSuperuser(v) + } + if _, ok := uc.mutation.ID(); !ok { + v := user.DefaultID() + uc.mutation.SetID(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (uc *UserCreate) check() error { + if _, ok := uc.mutation.Name(); !ok { + return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "User.name"`)} + } + if v, ok := uc.mutation.Name(); ok { + if err := user.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "User.name": %w`, err)} + } + } + if _, ok := uc.mutation.Email(); !ok { + return &ValidationError{Name: "email", err: errors.New(`ent: missing required field "User.email"`)} + } + if v, ok := uc.mutation.Email(); ok { + if err := user.EmailValidator(v); err != nil { + return &ValidationError{Name: "email", err: fmt.Errorf(`ent: validator failed for field "User.email": %w`, err)} + } + } + if _, ok := uc.mutation.Password(); !ok { + return &ValidationError{Name: "password", err: errors.New(`ent: missing required field "User.password"`)} + } + if v, ok := uc.mutation.Password(); ok { + if err := user.PasswordValidator(v); err != nil { + return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} + } + } + if _, ok := uc.mutation.IsSuperuser(); !ok { + return 
&ValidationError{Name: "is_superuser", err: errors.New(`ent: missing required field "User.is_superuser"`)} + } + return nil +} + +func (uc *UserCreate) sqlSave(ctx context.Context) (*User, error) { + _node, _spec := uc.createSpec() + if err := sqlgraph.CreateNode(ctx, uc.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + if _spec.ID.Value != nil { + if id, ok := _spec.ID.Value.(*uuid.UUID); ok { + _node.ID = *id + } else if err := _node.ID.Scan(_spec.ID.Value); err != nil { + return nil, err + } + } + return _node, nil +} + +func (uc *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) { + var ( + _node = &User{config: uc.config} + _spec = &sqlgraph.CreateSpec{ + Table: user.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + } + ) + if id, ok := uc.mutation.ID(); ok { + _node.ID = id + _spec.ID.Value = &id + } + if value, ok := uc.mutation.Name(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldName, + }) + _node.Name = value + } + if value, ok := uc.mutation.Email(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldEmail, + }) + _node.Email = value + } + if value, ok := uc.mutation.Password(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldPassword, + }) + _node.Password = value + } + if value, ok := uc.mutation.IsSuperuser(); ok { + _spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: user.FieldIsSuperuser, + }) + _node.IsSuperuser = value + } + if nodes := uc.mutation.AuthTokensIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: 
false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// UserCreateBulk is the builder for creating many User entities in bulk. +type UserCreateBulk struct { + config + builders []*UserCreate +} + +// Save creates the User entities in the database. +func (ucb *UserCreateBulk) Save(ctx context.Context) ([]*User, error) { + specs := make([]*sqlgraph.CreateSpec, len(ucb.builders)) + nodes := make([]*User, len(ucb.builders)) + mutators := make([]Mutator, len(ucb.builders)) + for i := range ucb.builders { + func(i int, root context.Context) { + builder := ucb.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + nodes[i], specs[i] = builder.createSpec() + var err error + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, ucb.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. 
+ if err = sqlgraph.BatchCreate(ctx, ucb.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, ucb.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (ucb *UserCreateBulk) SaveX(ctx context.Context) []*User { + v, err := ucb.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (ucb *UserCreateBulk) Exec(ctx context.Context) error { + _, err := ucb.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ucb *UserCreateBulk) ExecX(ctx context.Context) { + if err := ucb.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/backend/ent/user_delete.go b/backend/ent/user_delete.go new file mode 100644 index 0000000..6c5aafc --- /dev/null +++ b/backend/ent/user_delete.go @@ -0,0 +1,111 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserDelete is the builder for deleting a User entity. +type UserDelete struct { + config + hooks []Hook + mutation *UserMutation +} + +// Where appends a list predicates to the UserDelete builder. +func (ud *UserDelete) Where(ps ...predicate.User) *UserDelete { + ud.mutation.Where(ps...) + return ud +} + +// Exec executes the deletion query and returns how many vertices were deleted. 
+func (ud *UserDelete) Exec(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(ud.hooks) == 0 { + affected, err = ud.sqlExec(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + ud.mutation = mutation + affected, err = ud.sqlExec(ctx) + mutation.done = true + return affected, err + }) + for i := len(ud.hooks) - 1; i >= 0; i-- { + if ud.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = ud.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, ud.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// ExecX is like Exec, but panics if an error occurs. +func (ud *UserDelete) ExecX(ctx context.Context) int { + n, err := ud.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (ud *UserDelete) sqlExec(ctx context.Context) (int, error) { + _spec := &sqlgraph.DeleteSpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + if ps := ud.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return sqlgraph.DeleteNodes(ctx, ud.driver, _spec) +} + +// UserDeleteOne is the builder for deleting a single User entity. +type UserDeleteOne struct { + ud *UserDelete +} + +// Exec executes the deletion query. +func (udo *UserDeleteOne) Exec(ctx context.Context) error { + n, err := udo.ud.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{user.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (udo *UserDeleteOne) ExecX(ctx context.Context) { + udo.ud.ExecX(ctx) +} diff --git a/backend/ent/user_query.go b/backend/ent/user_query.go new file mode 100644 index 0000000..804688d --- /dev/null +++ b/backend/ent/user_query.go @@ -0,0 +1,993 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "database/sql/driver" + "errors" + "fmt" + "math" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserQuery is the builder for querying User entities. +type UserQuery struct { + config + limit *int + offset *int + unique *bool + order []OrderFunc + fields []string + predicates []predicate.User + // eager-loading edges. + withAuthTokens *AuthTokensQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the UserQuery builder. +func (uq *UserQuery) Where(ps ...predicate.User) *UserQuery { + uq.predicates = append(uq.predicates, ps...) + return uq +} + +// Limit adds a limit step to the query. +func (uq *UserQuery) Limit(limit int) *UserQuery { + uq.limit = &limit + return uq +} + +// Offset adds an offset step to the query. +func (uq *UserQuery) Offset(offset int) *UserQuery { + uq.offset = &offset + return uq +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (uq *UserQuery) Unique(unique bool) *UserQuery { + uq.unique = &unique + return uq +} + +// Order adds an order step to the query. +func (uq *UserQuery) Order(o ...OrderFunc) *UserQuery { + uq.order = append(uq.order, o...) 
+ return uq +} + +// QueryAuthTokens chains the current query on the "auth_tokens" edge. +func (uq *UserQuery) QueryAuthTokens() *AuthTokensQuery { + query := &AuthTokensQuery{config: uq.config} + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + selector := uq.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, selector), + sqlgraph.To(authtokens.Table, authtokens.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.AuthTokensTable, user.AuthTokensColumn), + ) + fromU = sqlgraph.SetNeighbors(uq.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first User entity from the query. +// Returns a *NotFoundError when no User was found. +func (uq *UserQuery) First(ctx context.Context) (*User, error) { + nodes, err := uq.Limit(1).All(ctx) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{user.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (uq *UserQuery) FirstX(ctx context.Context) *User { + node, err := uq.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first User ID from the query. +// Returns a *NotFoundError when no User ID was found. +func (uq *UserQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = uq.Limit(1).IDs(ctx); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{user.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. 
+func (uq *UserQuery) FirstIDX(ctx context.Context) uuid.UUID { + id, err := uq.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single User entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when exactly one User entity is not found. +// Returns a *NotFoundError when no User entities are found. +func (uq *UserQuery) Only(ctx context.Context) (*User, error) { + nodes, err := uq.Limit(2).All(ctx) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{user.Label} + default: + return nil, &NotSingularError{user.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (uq *UserQuery) OnlyX(ctx context.Context) *User { + node, err := uq.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only User ID in the query. +// Returns a *NotSingularError when exactly one User ID is not found. +// Returns a *NotFoundError when no entities are found. +func (uq *UserQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) { + var ids []uuid.UUID + if ids, err = uq.Limit(2).IDs(ctx); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{user.Label} + default: + err = &NotSingularError{user.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. +func (uq *UserQuery) OnlyIDX(ctx context.Context) uuid.UUID { + id, err := uq.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of Users. +func (uq *UserQuery) All(ctx context.Context) ([]*User, error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + return uq.sqlAll(ctx) +} + +// AllX is like All, but panics if an error occurs. 
+func (uq *UserQuery) AllX(ctx context.Context) []*User { + nodes, err := uq.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of User IDs. +func (uq *UserQuery) IDs(ctx context.Context) ([]uuid.UUID, error) { + var ids []uuid.UUID + if err := uq.Select(user.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (uq *UserQuery) IDsX(ctx context.Context) []uuid.UUID { + ids, err := uq.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (uq *UserQuery) Count(ctx context.Context) (int, error) { + if err := uq.prepareQuery(ctx); err != nil { + return 0, err + } + return uq.sqlCount(ctx) +} + +// CountX is like Count, but panics if an error occurs. +func (uq *UserQuery) CountX(ctx context.Context) int { + count, err := uq.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (uq *UserQuery) Exist(ctx context.Context) (bool, error) { + if err := uq.prepareQuery(ctx); err != nil { + return false, err + } + return uq.sqlExist(ctx) +} + +// ExistX is like Exist, but panics if an error occurs. +func (uq *UserQuery) ExistX(ctx context.Context) bool { + exist, err := uq.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the UserQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (uq *UserQuery) Clone() *UserQuery { + if uq == nil { + return nil + } + return &UserQuery{ + config: uq.config, + limit: uq.limit, + offset: uq.offset, + order: append([]OrderFunc{}, uq.order...), + predicates: append([]predicate.User{}, uq.predicates...), + withAuthTokens: uq.withAuthTokens.Clone(), + // clone intermediate query. 
+ sql: uq.sql.Clone(), + path: uq.path, + } +} + +// WithAuthTokens tells the query-builder to eager-load the nodes that are connected to +// the "auth_tokens" edge. The optional arguments are used to configure the query builder of the edge. +func (uq *UserQuery) WithAuthTokens(opts ...func(*AuthTokensQuery)) *UserQuery { + query := &AuthTokensQuery{config: uq.config} + for _, opt := range opts { + opt(query) + } + uq.withAuthTokens = query + return uq +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// Name string `json:"name,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.User.Query(). +// GroupBy(user.FieldName). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +// +func (uq *UserQuery) GroupBy(field string, fields ...string) *UserGroupBy { + group := &UserGroupBy{config: uq.config} + group.fields = append([]string{field}, fields...) + group.path = func(ctx context.Context) (prev *sql.Selector, err error) { + if err := uq.prepareQuery(ctx); err != nil { + return nil, err + } + return uq.sqlQuery(ctx), nil + } + return group +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// Name string `json:"name,omitempty"` +// } +// +// client.User.Query(). +// Select(user.FieldName). +// Scan(ctx, &v) +// +func (uq *UserQuery) Select(fields ...string) *UserSelect { + uq.fields = append(uq.fields, fields...) 
+ return &UserSelect{UserQuery: uq} +} + +func (uq *UserQuery) prepareQuery(ctx context.Context) error { + for _, f := range uq.fields { + if !user.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if uq.path != nil { + prev, err := uq.path(ctx) + if err != nil { + return err + } + uq.sql = prev + } + return nil +} + +func (uq *UserQuery) sqlAll(ctx context.Context) ([]*User, error) { + var ( + nodes = []*User{} + _spec = uq.querySpec() + loadedTypes = [1]bool{ + uq.withAuthTokens != nil, + } + ) + _spec.ScanValues = func(columns []string) ([]interface{}, error) { + node := &User{config: uq.config} + nodes = append(nodes, node) + return node.scanValues(columns) + } + _spec.Assign = func(columns []string, values []interface{}) error { + if len(nodes) == 0 { + return fmt.Errorf("ent: Assign called without calling ScanValues") + } + node := nodes[len(nodes)-1] + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + if err := sqlgraph.QueryNodes(ctx, uq.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + + if query := uq.withAuthTokens; query != nil { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[uuid.UUID]*User) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + nodes[i].Edges.AuthTokens = []*AuthTokens{} + } + query.withFKs = true + query.Where(predicate.AuthTokens(func(s *sql.Selector) { + s.Where(sql.InValues(user.AuthTokensColumn, fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return nil, err + } + for _, n := range neighbors { + fk := n.user_auth_tokens + if fk == nil { + return nil, fmt.Errorf(`foreign-key "user_auth_tokens" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return nil, fmt.Errorf(`unexpected foreign-key "user_auth_tokens" returned %v for node %v`, *fk, n.ID) + } + node.Edges.AuthTokens = 
append(node.Edges.AuthTokens, n) + } + } + + return nodes, nil +} + +func (uq *UserQuery) sqlCount(ctx context.Context) (int, error) { + _spec := uq.querySpec() + _spec.Node.Columns = uq.fields + if len(uq.fields) > 0 { + _spec.Unique = uq.unique != nil && *uq.unique + } + return sqlgraph.CountNodes(ctx, uq.driver, _spec) +} + +func (uq *UserQuery) sqlExist(ctx context.Context) (bool, error) { + n, err := uq.sqlCount(ctx) + if err != nil { + return false, fmt.Errorf("ent: check existence: %w", err) + } + return n > 0, nil +} + +func (uq *UserQuery) querySpec() *sqlgraph.QuerySpec { + _spec := &sqlgraph.QuerySpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + Columns: user.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + From: uq.sql, + Unique: true, + } + if unique := uq.unique; unique != nil { + _spec.Unique = *unique + } + if fields := uq.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, user.FieldID) + for i := range fields { + if fields[i] != user.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := uq.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := uq.limit; limit != nil { + _spec.Limit = *limit + } + if offset := uq.offset; offset != nil { + _spec.Offset = *offset + } + if ps := uq.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (uq *UserQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(uq.driver.Dialect()) + t1 := builder.Table(user.Table) + columns := uq.fields + if len(columns) == 0 { + columns = user.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if uq.sql != nil { + selector = uq.sql + selector.Select(selector.Columns(columns...)...) 
+ } + if uq.unique != nil && *uq.unique { + selector.Distinct() + } + for _, p := range uq.predicates { + p(selector) + } + for _, p := range uq.order { + p(selector) + } + if offset := uq.offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := uq.limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// UserGroupBy is the group-by builder for User entities. +type UserGroupBy struct { + config + fields []string + fns []AggregateFunc + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (ugb *UserGroupBy) Aggregate(fns ...AggregateFunc) *UserGroupBy { + ugb.fns = append(ugb.fns, fns...) + return ugb +} + +// Scan applies the group-by query and scans the result into the given value. +func (ugb *UserGroupBy) Scan(ctx context.Context, v interface{}) error { + query, err := ugb.path(ctx) + if err != nil { + return err + } + ugb.sql = query + return ugb.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (ugb *UserGroupBy) ScanX(ctx context.Context, v interface{}) { + if err := ugb.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Strings(ctx context.Context) ([]string, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Strings is not achievable when grouping more than 1 field") + } + var v []string + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. 
+func (ugb *UserGroupBy) StringsX(ctx context.Context) []string { + v, err := ugb.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = ugb.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. +func (ugb *UserGroupBy) StringX(ctx context.Context) string { + v, err := ugb.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Ints(ctx context.Context) ([]int, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Ints is not achievable when grouping more than 1 field") + } + var v []int + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (ugb *UserGroupBy) IntsX(ctx context.Context) []int { + v, err := ugb.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = ugb.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. 
+func (ugb *UserGroupBy) IntX(ctx context.Context) int { + v, err := ugb.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Float64s(ctx context.Context) ([]float64, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Float64s is not achievable when grouping more than 1 field") + } + var v []float64 + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. +func (ugb *UserGroupBy) Float64sX(ctx context.Context) []float64 { + v, err := ugb.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = ugb.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (ugb *UserGroupBy) Float64X(ctx context.Context) float64 { + v, err := ugb.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from group-by. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Bools(ctx context.Context) ([]bool, error) { + if len(ugb.fields) > 1 { + return nil, errors.New("ent: UserGroupBy.Bools is not achievable when grouping more than 1 field") + } + var v []bool + if err := ugb.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. 
+func (ugb *UserGroupBy) BoolsX(ctx context.Context) []bool { + v, err := ugb.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a group-by query. +// It is only allowed when executing a group-by query with one field. +func (ugb *UserGroupBy) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = ugb.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserGroupBy.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. +func (ugb *UserGroupBy) BoolX(ctx context.Context) bool { + v, err := ugb.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (ugb *UserGroupBy) sqlScan(ctx context.Context, v interface{}) error { + for _, f := range ugb.fields { + if !user.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)} + } + } + selector := ugb.sqlQuery() + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := ugb.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +func (ugb *UserGroupBy) sqlQuery() *sql.Selector { + selector := ugb.sql.Select() + aggregation := make([]string, 0, len(ugb.fns)) + for _, fn := range ugb.fns { + aggregation = append(aggregation, fn(selector)) + } + // If no columns were selected in a custom aggregation function, the default + // selection is the fields used for "group-by", and the aggregation functions. + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(ugb.fields)+len(ugb.fns)) + for _, f := range ugb.fields { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) 
+ } + return selector.GroupBy(selector.Columns(ugb.fields...)...) +} + +// UserSelect is the builder for selecting fields of User entities. +type UserSelect struct { + *UserQuery + // intermediate query (i.e. traversal path). + sql *sql.Selector +} + +// Scan applies the selector query and scans the result into the given value. +func (us *UserSelect) Scan(ctx context.Context, v interface{}) error { + if err := us.prepareQuery(ctx); err != nil { + return err + } + us.sql = us.UserQuery.sqlQuery(ctx) + return us.sqlScan(ctx, v) +} + +// ScanX is like Scan, but panics if an error occurs. +func (us *UserSelect) ScanX(ctx context.Context, v interface{}) { + if err := us.Scan(ctx, v); err != nil { + panic(err) + } +} + +// Strings returns list of strings from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Strings(ctx context.Context) ([]string, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Strings is not achievable when selecting more than 1 field") + } + var v []string + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// StringsX is like Strings, but panics if an error occurs. +func (us *UserSelect) StringsX(ctx context.Context) []string { + v, err := us.Strings(ctx) + if err != nil { + panic(err) + } + return v +} + +// String returns a single string from a selector. It is only allowed when selecting one field. +func (us *UserSelect) String(ctx context.Context) (_ string, err error) { + var v []string + if v, err = us.Strings(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Strings returned %d results when one was expected", len(v)) + } + return +} + +// StringX is like String, but panics if an error occurs. 
+func (us *UserSelect) StringX(ctx context.Context) string { + v, err := us.String(ctx) + if err != nil { + panic(err) + } + return v +} + +// Ints returns list of ints from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Ints(ctx context.Context) ([]int, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Ints is not achievable when selecting more than 1 field") + } + var v []int + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// IntsX is like Ints, but panics if an error occurs. +func (us *UserSelect) IntsX(ctx context.Context) []int { + v, err := us.Ints(ctx) + if err != nil { + panic(err) + } + return v +} + +// Int returns a single int from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Int(ctx context.Context) (_ int, err error) { + var v []int + if v, err = us.Ints(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Ints returned %d results when one was expected", len(v)) + } + return +} + +// IntX is like Int, but panics if an error occurs. +func (us *UserSelect) IntX(ctx context.Context) int { + v, err := us.Int(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64s returns list of float64s from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Float64s(ctx context.Context) ([]float64, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Float64s is not achievable when selecting more than 1 field") + } + var v []float64 + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// Float64sX is like Float64s, but panics if an error occurs. 
+func (us *UserSelect) Float64sX(ctx context.Context) []float64 { + v, err := us.Float64s(ctx) + if err != nil { + panic(err) + } + return v +} + +// Float64 returns a single float64 from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Float64(ctx context.Context) (_ float64, err error) { + var v []float64 + if v, err = us.Float64s(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Float64s returned %d results when one was expected", len(v)) + } + return +} + +// Float64X is like Float64, but panics if an error occurs. +func (us *UserSelect) Float64X(ctx context.Context) float64 { + v, err := us.Float64(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bools returns list of bools from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Bools(ctx context.Context) ([]bool, error) { + if len(us.fields) > 1 { + return nil, errors.New("ent: UserSelect.Bools is not achievable when selecting more than 1 field") + } + var v []bool + if err := us.Scan(ctx, &v); err != nil { + return nil, err + } + return v, nil +} + +// BoolsX is like Bools, but panics if an error occurs. +func (us *UserSelect) BoolsX(ctx context.Context) []bool { + v, err := us.Bools(ctx) + if err != nil { + panic(err) + } + return v +} + +// Bool returns a single bool from a selector. It is only allowed when selecting one field. +func (us *UserSelect) Bool(ctx context.Context) (_ bool, err error) { + var v []bool + if v, err = us.Bools(ctx); err != nil { + return + } + switch len(v) { + case 1: + return v[0], nil + case 0: + err = &NotFoundError{user.Label} + default: + err = fmt.Errorf("ent: UserSelect.Bools returned %d results when one was expected", len(v)) + } + return +} + +// BoolX is like Bool, but panics if an error occurs. 
+func (us *UserSelect) BoolX(ctx context.Context) bool { + v, err := us.Bool(ctx) + if err != nil { + panic(err) + } + return v +} + +func (us *UserSelect) sqlScan(ctx context.Context, v interface{}) error { + rows := &sql.Rows{} + query, args := us.sql.Query() + if err := us.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/backend/ent/user_update.go b/backend/ent/user_update.go new file mode 100644 index 0000000..d532fc5 --- /dev/null +++ b/backend/ent/user_update.go @@ -0,0 +1,592 @@ +// Code generated by entc, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/ent/predicate" + "github.com/hay-kot/git-web-template/backend/ent/user" +) + +// UserUpdate is the builder for updating User entities. +type UserUpdate struct { + config + hooks []Hook + mutation *UserMutation +} + +// Where appends a list predicates to the UserUpdate builder. +func (uu *UserUpdate) Where(ps ...predicate.User) *UserUpdate { + uu.mutation.Where(ps...) + return uu +} + +// SetName sets the "name" field. +func (uu *UserUpdate) SetName(s string) *UserUpdate { + uu.mutation.SetName(s) + return uu +} + +// SetEmail sets the "email" field. +func (uu *UserUpdate) SetEmail(s string) *UserUpdate { + uu.mutation.SetEmail(s) + return uu +} + +// SetPassword sets the "password" field. +func (uu *UserUpdate) SetPassword(s string) *UserUpdate { + uu.mutation.SetPassword(s) + return uu +} + +// SetIsSuperuser sets the "is_superuser" field. +func (uu *UserUpdate) SetIsSuperuser(b bool) *UserUpdate { + uu.mutation.SetIsSuperuser(b) + return uu +} + +// SetNillableIsSuperuser sets the "is_superuser" field if the given value is not nil. 
+func (uu *UserUpdate) SetNillableIsSuperuser(b *bool) *UserUpdate { + if b != nil { + uu.SetIsSuperuser(*b) + } + return uu +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. +func (uu *UserUpdate) AddAuthTokenIDs(ids ...int) *UserUpdate { + uu.mutation.AddAuthTokenIDs(ids...) + return uu +} + +// AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. +func (uu *UserUpdate) AddAuthTokens(a ...*AuthTokens) *UserUpdate { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uu.AddAuthTokenIDs(ids...) +} + +// Mutation returns the UserMutation object of the builder. +func (uu *UserUpdate) Mutation() *UserMutation { + return uu.mutation +} + +// ClearAuthTokens clears all "auth_tokens" edges to the AuthTokens entity. +func (uu *UserUpdate) ClearAuthTokens() *UserUpdate { + uu.mutation.ClearAuthTokens() + return uu +} + +// RemoveAuthTokenIDs removes the "auth_tokens" edge to AuthTokens entities by IDs. +func (uu *UserUpdate) RemoveAuthTokenIDs(ids ...int) *UserUpdate { + uu.mutation.RemoveAuthTokenIDs(ids...) + return uu +} + +// RemoveAuthTokens removes "auth_tokens" edges to AuthTokens entities. +func (uu *UserUpdate) RemoveAuthTokens(a ...*AuthTokens) *UserUpdate { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uu.RemoveAuthTokenIDs(ids...) +} + +// Save executes the query and returns the number of nodes affected by the update operation. 
+func (uu *UserUpdate) Save(ctx context.Context) (int, error) { + var ( + err error + affected int + ) + if len(uu.hooks) == 0 { + if err = uu.check(); err != nil { + return 0, err + } + affected, err = uu.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = uu.check(); err != nil { + return 0, err + } + uu.mutation = mutation + affected, err = uu.sqlSave(ctx) + mutation.done = true + return affected, err + }) + for i := len(uu.hooks) - 1; i >= 0; i-- { + if uu.hooks[i] == nil { + return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = uu.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, uu.mutation); err != nil { + return 0, err + } + } + return affected, err +} + +// SaveX is like Save, but panics if an error occurs. +func (uu *UserUpdate) SaveX(ctx context.Context) int { + affected, err := uu.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (uu *UserUpdate) Exec(ctx context.Context) error { + _, err := uu.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (uu *UserUpdate) ExecX(ctx context.Context) { + if err := uu.Exec(ctx); err != nil { + panic(err) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (uu *UserUpdate) check() error { + if v, ok := uu.mutation.Name(); ok { + if err := user.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "User.name": %w`, err)} + } + } + if v, ok := uu.mutation.Email(); ok { + if err := user.EmailValidator(v); err != nil { + return &ValidationError{Name: "email", err: fmt.Errorf(`ent: validator failed for field "User.email": %w`, err)} + } + } + if v, ok := uu.mutation.Password(); ok { + if err := user.PasswordValidator(v); err != nil { + return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} + } + } + return nil +} + +func (uu *UserUpdate) sqlSave(ctx context.Context) (n int, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + Columns: user.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + if ps := uu.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := uu.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldName, + }) + } + if value, ok := uu.mutation.Email(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldEmail, + }) + } + if value, ok := uu.mutation.Password(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldPassword, + }) + } + if value, ok := uu.mutation.IsSuperuser(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: user.FieldIsSuperuser, + }) + } + if uu.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, 
+ Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uu.mutation.RemovedAuthTokensIDs(); len(nodes) > 0 && !uu.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uu.mutation.AuthTokensIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if n, err = sqlgraph.UpdateNodes(ctx, uu.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{user.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return 0, err + } + return n, nil +} + +// UserUpdateOne is the builder for updating a single User entity. +type UserUpdateOne struct { + config + fields []string + hooks []Hook + mutation *UserMutation +} + +// SetName sets the "name" field. +func (uuo *UserUpdateOne) SetName(s string) *UserUpdateOne { + uuo.mutation.SetName(s) + return uuo +} + +// SetEmail sets the "email" field. 
+func (uuo *UserUpdateOne) SetEmail(s string) *UserUpdateOne { + uuo.mutation.SetEmail(s) + return uuo +} + +// SetPassword sets the "password" field. +func (uuo *UserUpdateOne) SetPassword(s string) *UserUpdateOne { + uuo.mutation.SetPassword(s) + return uuo +} + +// SetIsSuperuser sets the "is_superuser" field. +func (uuo *UserUpdateOne) SetIsSuperuser(b bool) *UserUpdateOne { + uuo.mutation.SetIsSuperuser(b) + return uuo +} + +// SetNillableIsSuperuser sets the "is_superuser" field if the given value is not nil. +func (uuo *UserUpdateOne) SetNillableIsSuperuser(b *bool) *UserUpdateOne { + if b != nil { + uuo.SetIsSuperuser(*b) + } + return uuo +} + +// AddAuthTokenIDs adds the "auth_tokens" edge to the AuthTokens entity by IDs. +func (uuo *UserUpdateOne) AddAuthTokenIDs(ids ...int) *UserUpdateOne { + uuo.mutation.AddAuthTokenIDs(ids...) + return uuo +} + +// AddAuthTokens adds the "auth_tokens" edges to the AuthTokens entity. +func (uuo *UserUpdateOne) AddAuthTokens(a ...*AuthTokens) *UserUpdateOne { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uuo.AddAuthTokenIDs(ids...) +} + +// Mutation returns the UserMutation object of the builder. +func (uuo *UserUpdateOne) Mutation() *UserMutation { + return uuo.mutation +} + +// ClearAuthTokens clears all "auth_tokens" edges to the AuthTokens entity. +func (uuo *UserUpdateOne) ClearAuthTokens() *UserUpdateOne { + uuo.mutation.ClearAuthTokens() + return uuo +} + +// RemoveAuthTokenIDs removes the "auth_tokens" edge to AuthTokens entities by IDs. +func (uuo *UserUpdateOne) RemoveAuthTokenIDs(ids ...int) *UserUpdateOne { + uuo.mutation.RemoveAuthTokenIDs(ids...) + return uuo +} + +// RemoveAuthTokens removes "auth_tokens" edges to AuthTokens entities. +func (uuo *UserUpdateOne) RemoveAuthTokens(a ...*AuthTokens) *UserUpdateOne { + ids := make([]int, len(a)) + for i := range a { + ids[i] = a[i].ID + } + return uuo.RemoveAuthTokenIDs(ids...) 
+} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (uuo *UserUpdateOne) Select(field string, fields ...string) *UserUpdateOne { + uuo.fields = append([]string{field}, fields...) + return uuo +} + +// Save executes the query and returns the updated User entity. +func (uuo *UserUpdateOne) Save(ctx context.Context) (*User, error) { + var ( + err error + node *User + ) + if len(uuo.hooks) == 0 { + if err = uuo.check(); err != nil { + return nil, err + } + node, err = uuo.sqlSave(ctx) + } else { + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*UserMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err = uuo.check(); err != nil { + return nil, err + } + uuo.mutation = mutation + node, err = uuo.sqlSave(ctx) + mutation.done = true + return node, err + }) + for i := len(uuo.hooks) - 1; i >= 0; i-- { + if uuo.hooks[i] == nil { + return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)") + } + mut = uuo.hooks[i](mut) + } + if _, err := mut.Mutate(ctx, uuo.mutation); err != nil { + return nil, err + } + } + return node, err +} + +// SaveX is like Save, but panics if an error occurs. +func (uuo *UserUpdateOne) SaveX(ctx context.Context) *User { + node, err := uuo.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (uuo *UserUpdateOne) Exec(ctx context.Context) error { + _, err := uuo.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (uuo *UserUpdateOne) ExecX(ctx context.Context) { + if err := uuo.Exec(ctx); err != nil { + panic(err) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (uuo *UserUpdateOne) check() error { + if v, ok := uuo.mutation.Name(); ok { + if err := user.NameValidator(v); err != nil { + return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "User.name": %w`, err)} + } + } + if v, ok := uuo.mutation.Email(); ok { + if err := user.EmailValidator(v); err != nil { + return &ValidationError{Name: "email", err: fmt.Errorf(`ent: validator failed for field "User.email": %w`, err)} + } + } + if v, ok := uuo.mutation.Password(); ok { + if err := user.PasswordValidator(v); err != nil { + return &ValidationError{Name: "password", err: fmt.Errorf(`ent: validator failed for field "User.password": %w`, err)} + } + } + return nil +} + +func (uuo *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) { + _spec := &sqlgraph.UpdateSpec{ + Node: &sqlgraph.NodeSpec{ + Table: user.Table, + Columns: user.Columns, + ID: &sqlgraph.FieldSpec{ + Type: field.TypeUUID, + Column: user.FieldID, + }, + }, + } + id, ok := uuo.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "User.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := uuo.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, user.FieldID) + for _, f := range fields { + if !user.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != user.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := uuo.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := uuo.mutation.Name(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldName, + }) + } + if value, ok := uuo.mutation.Email(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, 
&sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldEmail, + }) + } + if value, ok := uuo.mutation.Password(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeString, + Value: value, + Column: user.FieldPassword, + }) + } + if value, ok := uuo.mutation.IsSuperuser(); ok { + _spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{ + Type: field.TypeBool, + Value: value, + Column: user.FieldIsSuperuser, + }) + } + if uuo.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uuo.mutation.RemovedAuthTokensIDs(); len(nodes) > 0 && !uuo.mutation.AuthTokensCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := uuo.mutation.AuthTokensIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.AuthTokensTable, + Columns: []string{user.AuthTokensColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: &sqlgraph.FieldSpec{ + Type: field.TypeInt, + Column: authtokens.FieldID, + }, + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &User{config: uuo.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = 
_node.scanValues + if err = sqlgraph.UpdateNode(ctx, uuo.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{user.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{err.Error(), err} + } + return nil, err + } + return _node, nil +} diff --git a/backend/go.mod b/backend/go.mod new file mode 100644 index 0000000..70ff971 --- /dev/null +++ b/backend/go.mod @@ -0,0 +1,49 @@ +module github.com/hay-kot/git-web-template/backend + +go 1.18 + +require ( + entgo.io/ent v0.10.0 + github.com/ardanlabs/conf/v2 v2.2.0 + github.com/go-chi/chi/v5 v5.0.7 + github.com/google/uuid v1.3.0 + github.com/mattn/go-sqlite3 v1.14.10 + github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 + github.com/swaggo/http-swagger v1.3.0 + github.com/swaggo/swag v1.8.3 + github.com/tkrajina/typescriptify-golang-structs v0.1.7 + github.com/urfave/cli/v2 v2.3.0 + golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 +) + +require ( + ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd // indirect + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/agext/levenshtein v1.2.1 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-openapi/inflect v0.19.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.20.0 // indirect + github.com/go-openapi/spec v0.20.6 // indirect + github.com/go-openapi/swag v0.21.1 // indirect + github.com/google/go-cmp v0.5.6 // indirect + github.com/hashicorp/hcl/v2 v2.10.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/kr/pretty v0.2.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/russross/blackfriday/v2 
v2.1.0 // indirect + github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe // indirect + github.com/tkrajina/go-reflector v0.5.5 // indirect + github.com/zclconf/go-cty v1.8.0 // indirect + golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect + golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 // indirect + golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e // indirect + golang.org/x/text v0.3.7 // indirect + golang.org/x/tools v0.1.11 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect +) diff --git a/backend/go.sum b/backend/go.sum new file mode 100644 index 0000000..118c2e5 --- /dev/null +++ b/backend/go.sum @@ -0,0 +1,154 @@ +ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd h1:YxnJl3ySvwQ3C7Rspa4CrQtwrftTZ0F8WJ36CvY7nWE= +ariga.io/atlas v0.3.2-0.20220120225051-c3fac7d636dd/go.mod h1:XcLUpQX7Cq4qtagEHIleq3MJaBeeJ76BS8doc4gkOJk= +entgo.io/ent v0.10.0 h1:9cBomE1fh+WX34DPYQL7tDNAIvhKa3tXvwxuLyhYCMo= +entgo.io/ent v0.10.0/go.mod h1:5bjIYdTizykmdtPY3knXrrGpxAh0cMjFfxdNnlNiUGU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod 
h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/ardanlabs/conf/v2 v2.2.0 h1:ar1+TYIYAh2Tdeg2DQroh7ruR56/vJR8BDfzDIrXgtk= +github.com/ardanlabs/conf/v2 v2.2.0/go.mod h1:m37ZKdW9jwMUEhGX36jRNt8VzSQ/HVmSziLZH2p33nY= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8= +github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= +github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= +github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= +github.com/go-openapi/spec v0.20.6 h1:ich1RQ3WDbfoeTqTAb+5EIxNmpKVJZWBNah9RAT0jIQ= +github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= 
+github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU= +github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/hcl/v2 v2.10.0 h1:1S1UnuhDGlv3gRFV4+0EdwB+znNP5HmcGbIqwnSCByg= +github.com/hashicorp/hcl/v2 v2.10.0/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod 
h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-sqlite3 v1.14.10 h1:MLn+5bFRlWMGoSRmJour3CL1w/qL96mvipqpwQW/Sfk= +github.com/mattn/go-sqlite3 v1.14.10/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= 
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU= +github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe h1:K8pHPVoTgxFJt1lXuIzzOX7zZhZFldJQK/CgKx9BFIc= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/http-swagger v1.3.0 h1:1+6M4qRorIbdyTWTsGrwnb0r9jGK5dcWN82O6oY/yHQ= +github.com/swaggo/http-swagger v1.3.0/go.mod h1:9glekdg40lwclrrKNRGgj/IMDxpNPZ3kzab4oPcF8EM= +github.com/swaggo/swag v1.8.3 h1:3pZSSCQ//gAH88lfmxM3Cd1+JCsxV8Md6f36b9hrZ5s= +github.com/swaggo/swag v1.8.3/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg= +github.com/tkrajina/go-reflector v0.5.5 h1:gwoQFNye30Kk7NrExj8zm3zFtrGPqOkzFMLuQZg1DtQ= +github.com/tkrajina/go-reflector v0.5.5/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= +github.com/tkrajina/typescriptify-golang-structs v0.1.7 h1:72jmiT/brlgtCPpwu4X0HkhMeUMtx8+xDiTMS93rFqY= +github.com/tkrajina/typescriptify-golang-structs v0.1.7/go.mod h1:sjU00nti/PMEOZb07KljFlR+lJ+RotsC0GBQMv9EKls= +github.com/urfave/cli/v2 v2.3.0 
h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= +github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA= +github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 h1:8NSylCMxLW4JvserAndSgFL7aPli6A68yf0bYFTcWCM= +golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e h1:CsOuNlbOuf0mzxJIefr6Q4uAUetRUwZE4qt7VfzP+xo= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.1.11 h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY= +golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= +golang.org/x/xerrors 
v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/backend/internal/config/conf.go b/backend/internal/config/conf.go new file mode 100644 index 0000000..cf90938 --- /dev/null +++ b/backend/internal/config/conf.go @@ -0,0 +1,81 @@ +package config + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + + "github.com/ardanlabs/conf/v2" + "github.com/ardanlabs/conf/v2/yaml" + + "os" +) + +const ( + ModeDevelopment = "development" + ModeProduction = "production" +) + +type Config 
struct { + Mode string `yaml:"mode" conf:"default:development"` // development or production + Web WebConfig `yaml:"web"` + Database Database `yaml:"database"` + Log LoggerConf `yaml:"logger"` + Mailer MailerConf `yaml:"mailer"` + Seed Seed `yaml:"seed"` + Swagger SwaggerConf `yaml:"swagger"` +} + +type SwaggerConf struct { + Host string `yaml:"host" conf:"default:localhost:7745"` + Scheme string `yaml:"scheme" conf:"default:http"` +} + +type WebConfig struct { + Port string `yaml:"port" conf:"default:7745"` + Host string `yaml:"host" conf:"default:127.0.0.1"` +} + +// NewConfig parses the CLI/Config file and returns a Config struct. If the file argument is an empty string, the +// file is not read. If the file is not empty, the file is read and the Config struct is returned. +func NewConfig(file string) (*Config, error) { + var cfg Config + + const prefix = "API" + + help, err := func() (string, error) { + if _, err := os.Stat(file); errors.Is(err, os.ErrNotExist) { + return conf.Parse(prefix, &cfg) + } else { + yamlData, err := ioutil.ReadFile(file) + if err != nil { + return "", err + } + return conf.Parse(prefix, &cfg, yaml.WithData(yamlData)) + } + }() + + if err != nil { + if errors.Is(err, conf.ErrHelpWanted) { + fmt.Println(help) + os.Exit(0) + } + return &cfg, fmt.Errorf("parsing config: %w", err) + } + + return &cfg, nil +} + +// Print prints the configuration to stdout as a json indented string +// This is useful for debugging. If the marshaller errors out, it will panic. 
+func (c *Config) Print() { + res, err := json.MarshalIndent(c, "", " ") + + if err != nil { + panic(err) + } + + fmt.Println(string(res)) + +} diff --git a/backend/internal/config/conf_database.go b/backend/internal/config/conf_database.go new file mode 100644 index 0000000..d8a6c7b --- /dev/null +++ b/backend/internal/config/conf_database.go @@ -0,0 +1,27 @@ +package config + +const ( + DriverSqlite3 = "sqlite3" + DriverPostgres = "postgres" +) + +type Database struct { + Driver string `yaml:"driver" conf:"default:sqlite3"` + SqliteUrl string `yaml:"sqlite-url" conf:"default:file:ent?mode=memory&cache=shared&_fk=1"` + PostgresUrl string `yaml:"postgres-url" conf:""` +} + +func (d *Database) GetDriver() string { + return d.Driver +} + +func (d *Database) GetUrl() string { + switch d.Driver { + case DriverSqlite3: + return d.SqliteUrl + case DriverPostgres: + return d.PostgresUrl + default: + panic("unknown database driver") + } +} diff --git a/backend/internal/config/conf_database_test.go b/backend/internal/config/conf_database_test.go new file mode 100644 index 0000000..4720a15 --- /dev/null +++ b/backend/internal/config/conf_database_test.go @@ -0,0 +1,36 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_DatabaseConfig_Sqlite(t *testing.T) { + dbConf := &Database{ + Driver: DriverSqlite3, + SqliteUrl: "file:ent?mode=memory&cache=shared&_fk=1", + } + + assert.Equal(t, "sqlite3", dbConf.GetDriver()) + assert.Equal(t, "file:ent?mode=memory&cache=shared&_fk=1", dbConf.GetUrl()) +} + +func Test_DatabaseConfig_Postgres(t *testing.T) { + dbConf := &Database{ + Driver: DriverPostgres, + PostgresUrl: "postgres://user:pass@host:port/dbname?sslmode=disable", + } + + assert.Equal(t, "postgres", dbConf.GetDriver()) + assert.Equal(t, "postgres://user:pass@host:port/dbname?sslmode=disable", dbConf.GetUrl()) +} + +func Test_DatabaseConfig_Unknown(t *testing.T) { + dbConf := &Database{ + Driver: "null", + } + + assert.Panics(t, 
func() { dbConf.GetUrl() }) + +} diff --git a/backend/internal/config/conf_logger.go b/backend/internal/config/conf_logger.go new file mode 100644 index 0000000..9438cb8 --- /dev/null +++ b/backend/internal/config/conf_logger.go @@ -0,0 +1,6 @@ +package config + +type LoggerConf struct { + Level string `conf:"default:debug"` + File string `conf:""` +} diff --git a/backend/internal/config/conf_mailer.go b/backend/internal/config/conf_mailer.go new file mode 100644 index 0000000..1335a96 --- /dev/null +++ b/backend/internal/config/conf_mailer.go @@ -0,0 +1,15 @@ +package config + +type MailerConf struct { + Host string `conf:""` + Port int `conf:""` + Username string `conf:""` + Password string `conf:""` + From string `conf:""` +} + +// Ready is a simple check to ensure that the configuration is not empty. +// or with it's default state. +func (mc *MailerConf) Ready() bool { + return mc.Host != "" && mc.Port != 0 && mc.Username != "" && mc.Password != "" && mc.From != "" +} diff --git a/backend/internal/config/conf_mailer_test.go b/backend/internal/config/conf_mailer_test.go new file mode 100644 index 0000000..8656755 --- /dev/null +++ b/backend/internal/config/conf_mailer_test.go @@ -0,0 +1,40 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_MailerReady_Success(t *testing.T) { + mc := &MailerConf{ + Host: "host", + Port: 1, + Username: "username", + Password: "password", + From: "from", + } + + assert.True(t, mc.Ready()) +} + +func Test_MailerReady_Failure(t *testing.T) { + mc := &MailerConf{} + assert.False(t, mc.Ready()) + + mc.Host = "host" + assert.False(t, mc.Ready()) + + mc.Port = 1 + assert.False(t, mc.Ready()) + + mc.Username = "username" + assert.False(t, mc.Ready()) + + mc.Password = "password" + assert.False(t, mc.Ready()) + + mc.From = "from" + assert.True(t, mc.Ready()) + +} diff --git a/backend/internal/config/conf_seed.go b/backend/internal/config/conf_seed.go new file mode 100644 index 
0000000..e076593 --- /dev/null +++ b/backend/internal/config/conf_seed.go @@ -0,0 +1,13 @@ +package config + +type SeedUser struct { + Name string `yaml:"name"` + Email string `yaml:"email"` + Password string `yaml:"password"` + IsSuperuser bool `yaml:"isSuperuser"` +} + +type Seed struct { + Enabled bool `yaml:"enabled" conf:"default:false"` + Users []SeedUser `yaml:"users"` +} diff --git a/backend/internal/mapper/users_automapper.go b/backend/internal/mapper/users_automapper.go new file mode 100644 index 0000000..4f97e09 --- /dev/null +++ b/backend/internal/mapper/users_automapper.go @@ -0,0 +1,27 @@ +// Code generated by "/pkgs/automapper"; DO NOT EDIT. +package mapper + +import ( + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +func UserOutFromModel(from ent.User) types.UserOut { + return types.UserOut{ + ID: from.ID, + Name: from.Name, + Email: from.Email, + Password: from.Password, + IsSuperuser: from.IsSuperuser, + } +} + +func UserOutToModel(from types.UserOut) ent.User { + return ent.User{ + ID: from.ID, + Name: from.Name, + Email: from.Email, + Password: from.Password, + IsSuperuser: from.IsSuperuser, + } +} diff --git a/backend/internal/mocks/chimocker/chimocker.go b/backend/internal/mocks/chimocker/chimocker.go new file mode 100644 index 0000000..b918403 --- /dev/null +++ b/backend/internal/mocks/chimocker/chimocker.go @@ -0,0 +1,30 @@ +package chimocker + +import ( + "context" + "net/http" + + "github.com/go-chi/chi/v5" +) + +type Params map[string]string + +// WithUrlParam returns a pointer to a request object with the given URL params +// added to a new chi.Context object. 
+func WithUrlParam(r *http.Request, key, value string) *http.Request { + chiCtx := chi.NewRouteContext() + req := r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, chiCtx)) + chiCtx.URLParams.Add(key, value) + return req +} + +// WithUrlParams returns a pointer to a request object with the given URL params +// added to a new chi.Context object. for single param assignment see WithUrlParam +func WithUrlParams(r *http.Request, params Params) *http.Request { + chiCtx := chi.NewRouteContext() + req := r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, chiCtx)) + for key, value := range params { + chiCtx.URLParams.Add(key, value) + } + return req +} diff --git a/backend/internal/mocks/factories/users.go b/backend/internal/mocks/factories/users.go new file mode 100644 index 0000000..1642a7a --- /dev/null +++ b/backend/internal/mocks/factories/users.go @@ -0,0 +1,16 @@ +package factories + +import ( + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/faker" +) + +func UserFactory() types.UserCreate { + f := faker.NewFaker() + return types.UserCreate{ + Name: f.RandomString(10), + Email: f.RandomEmail(), + Password: f.RandomString(10), + IsSuperuser: f.RandomBool(), + } +} diff --git a/backend/internal/mocks/mock_logger.go b/backend/internal/mocks/mock_logger.go new file mode 100644 index 0000000..d367161 --- /dev/null +++ b/backend/internal/mocks/mock_logger.go @@ -0,0 +1,11 @@ +package mocks + +import ( + "os" + + "github.com/hay-kot/git-web-template/backend/pkgs/logger" +) + +func GetStructLogger() *logger.Logger { + return logger.New(os.Stdout, logger.LevelDebug) +} diff --git a/backend/internal/mocks/mocker_services.go b/backend/internal/mocks/mocker_services.go new file mode 100644 index 0000000..3011fb1 --- /dev/null +++ b/backend/internal/mocks/mocker_services.go @@ -0,0 +1,10 @@ +package mocks + +import ( + "github.com/hay-kot/git-web-template/backend/internal/repo" + 
"github.com/hay-kot/git-web-template/backend/internal/services" +) + +func GetMockServices(repos *repo.AllRepos) *services.AllServices { + return services.NewServices(repos) +} diff --git a/backend/internal/mocks/mocks_ent_repo.go b/backend/internal/mocks/mocks_ent_repo.go new file mode 100644 index 0000000..9273502 --- /dev/null +++ b/backend/internal/mocks/mocks_ent_repo.go @@ -0,0 +1,22 @@ +package mocks + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/internal/repo" + _ "github.com/mattn/go-sqlite3" +) + +func GetEntRepos() (*repo.AllRepos, func() error) { + c, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1") + if err != nil { + panic(err) + } + + if err := c.Schema.Create(context.Background()); err != nil { + panic(err) + } + + return repo.EntAllRepos(c), c.Close +} diff --git a/backend/internal/repo/main_test.go b/backend/internal/repo/main_test.go new file mode 100644 index 0000000..f516bc5 --- /dev/null +++ b/backend/internal/repo/main_test.go @@ -0,0 +1,38 @@ +package repo + +import ( + "context" + "log" + "math/rand" + "os" + "testing" + "time" + + "github.com/hay-kot/git-web-template/backend/ent" + _ "github.com/mattn/go-sqlite3" +) + +var testEntClient *ent.Client +var testRepos *AllRepos + +func TestMain(m *testing.M) { + rand.Seed(int64(time.Now().Unix())) + + client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1") + if err != nil { + log.Fatalf("failed opening connection to sqlite: %v", err) + } + + if err := client.Schema.Create(context.Background()); err != nil { + log.Fatalf("failed creating schema resources: %v", err) + } + + testEntClient = client + testRepos = EntAllRepos(testEntClient) + + code := m.Run() + + client.Close() + + os.Exit(code) +} diff --git a/backend/internal/repo/repos_all.go b/backend/internal/repo/repos_all.go new file mode 100644 index 0000000..faf50ee --- /dev/null +++ b/backend/internal/repo/repos_all.go 
@@ -0,0 +1,16 @@ +package repo + +import "github.com/hay-kot/git-web-template/backend/ent" + +// AllRepos is a container for all the repository interfaces +type AllRepos struct { + Users UserRepository + AuthTokens TokenRepository +} + +func EntAllRepos(db *ent.Client) *AllRepos { + return &AllRepos{ + Users: &EntUserRepository{db}, + AuthTokens: &EntTokenRepository{db}, + } +} diff --git a/backend/internal/repo/token_ent.go b/backend/internal/repo/token_ent.go new file mode 100644 index 0000000..f96ed7c --- /dev/null +++ b/backend/internal/repo/token_ent.go @@ -0,0 +1,74 @@ +package repo + +import ( + "context" + "time" + + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/ent/authtokens" + "github.com/hay-kot/git-web-template/backend/internal/mapper" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type EntTokenRepository struct { + db *ent.Client +} + +// GetUserFromToken get's a user from a token +func (r *EntTokenRepository) GetUserFromToken(ctx context.Context, token []byte) (types.UserOut, error) { + dbToken, err := r.db.AuthTokens.Query(). + Where(authtokens.Token(token)). + Where(authtokens.ExpiresAtGTE(time.Now())). + WithUser(). + Only(ctx) + + if err != nil { + return types.UserOut{}, err + } + + return mapper.UserOutFromModel(*dbToken.Edges.User), nil +} + +// Creates a token for a user +func (r *EntTokenRepository) CreateToken(ctx context.Context, createToken types.UserAuthTokenCreate) (types.UserAuthToken, error) { + tokenOut := types.UserAuthToken{} + + dbToken, err := r.db.AuthTokens.Create(). + SetToken(createToken.TokenHash). + SetUserID(createToken.UserID). + SetExpiresAt(createToken.ExpiresAt). 
+ Save(ctx) + + if err != nil { + return tokenOut, err + } + + tokenOut.TokenHash = dbToken.Token + tokenOut.UserID = createToken.UserID + tokenOut.CreatedAt = dbToken.CreatedAt + tokenOut.ExpiresAt = dbToken.ExpiresAt + + return tokenOut, nil +} + +// DeleteToken remove a single token from the database - equivalent to revoke or logout +func (r *EntTokenRepository) DeleteToken(ctx context.Context, token []byte) error { + _, err := r.db.AuthTokens.Delete().Where(authtokens.Token(token)).Exec(ctx) + return err +} + +// PurgeExpiredTokens removes all expired tokens from the database +func (r *EntTokenRepository) PurgeExpiredTokens(ctx context.Context) (int, error) { + tokensDeleted, err := r.db.AuthTokens.Delete().Where(authtokens.ExpiresAtLTE(time.Now())).Exec(ctx) + + if err != nil { + return 0, err + } + + return tokensDeleted, nil +} + +func (r *EntTokenRepository) DeleteAll(ctx context.Context) (int, error) { + amount, err := r.db.AuthTokens.Delete().Exec(ctx) + return amount, err +} diff --git a/backend/internal/repo/token_ent_test.go b/backend/internal/repo/token_ent_test.go new file mode 100644 index 0000000..ae01a5a --- /dev/null +++ b/backend/internal/repo/token_ent_test.go @@ -0,0 +1,110 @@ +package repo + +import ( + "context" + "testing" + "time" + + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" + "github.com/stretchr/testify/assert" +) + +func Test_EntAuthTokenRepo_CreateToken(t *testing.T) { + assert := assert.New(t) + ctx := context.Background() + + user := UserFactory() + + userOut, _ := testRepos.Users.Create(ctx, user) + + expiresAt := time.Now().Add(time.Hour) + + generatedToken := hasher.GenerateToken() + + token, err := testRepos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + TokenHash: generatedToken.Hash, + ExpiresAt: expiresAt, + UserID: userOut.ID, + }) + + assert.NoError(err) + assert.Equal(userOut.ID, token.UserID) + assert.Equal(expiresAt, token.ExpiresAt) 
+ + // Cleanup + err = testRepos.Users.Delete(ctx, userOut.ID) + _, err = testRepos.AuthTokens.DeleteAll(ctx) +} + +func Test_EntAuthTokenRepo_GetUserByToken(t *testing.T) { + assert := assert.New(t) + ctx := context.Background() + + user := UserFactory() + userOut, _ := testRepos.Users.Create(ctx, user) + + expiresAt := time.Now().Add(time.Hour) + generatedToken := hasher.GenerateToken() + + token, err := testRepos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + TokenHash: generatedToken.Hash, + ExpiresAt: expiresAt, + UserID: userOut.ID, + }) + + // Get User from token + foundUser, err := testRepos.AuthTokens.GetUserFromToken(ctx, token.TokenHash) + + assert.NoError(err) + assert.Equal(userOut.ID, foundUser.ID) + assert.Equal(userOut.Name, foundUser.Name) + assert.Equal(userOut.Email, foundUser.Email) + + // Cleanup + err = testRepos.Users.Delete(ctx, userOut.ID) + _, err = testRepos.AuthTokens.DeleteAll(ctx) +} + +func Test_EntAuthTokenRepo_PurgeExpiredTokens(t *testing.T) { + assert := assert.New(t) + ctx := context.Background() + + user := UserFactory() + userOut, _ := testRepos.Users.Create(ctx, user) + + createdTokens := []types.UserAuthToken{} + + for i := 0; i < 5; i++ { + expiresAt := time.Now() + generatedToken := hasher.GenerateToken() + + createdToken, err := testRepos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + TokenHash: generatedToken.Hash, + ExpiresAt: expiresAt, + UserID: userOut.ID, + }) + + assert.NoError(err) + assert.NotNil(createdToken) + + createdTokens = append(createdTokens, createdToken) + + } + + // Purge expired tokens + tokensDeleted, err := testRepos.AuthTokens.PurgeExpiredTokens(ctx) + + assert.NoError(err) + assert.Equal(5, tokensDeleted) + + // Check if tokens are deleted + for _, token := range createdTokens { + _, err := testRepos.AuthTokens.GetUserFromToken(ctx, token.TokenHash) + assert.Error(err) + } + + // Cleanup + err = testRepos.Users.Delete(ctx, userOut.ID) + _, err = 
testRepos.AuthTokens.DeleteAll(ctx) +} diff --git a/backend/internal/repo/token_interface.go b/backend/internal/repo/token_interface.go new file mode 100644 index 0000000..4396063 --- /dev/null +++ b/backend/internal/repo/token_interface.go @@ -0,0 +1,20 @@ +package repo + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type TokenRepository interface { + // GetUserFromToken get's a user from a token + GetUserFromToken(ctx context.Context, token []byte) (types.UserOut, error) + // Creates a token for a user + CreateToken(ctx context.Context, createToken types.UserAuthTokenCreate) (types.UserAuthToken, error) + // DeleteToken remove a single token from the database - equivalent to revoke or logout + DeleteToken(ctx context.Context, token []byte) error + // PurgeExpiredTokens removes all expired tokens from the database + PurgeExpiredTokens(ctx context.Context) (int, error) + // DeleteAll removes all tokens from the database + DeleteAll(ctx context.Context) (int, error) +} diff --git a/backend/internal/repo/users_ent.go b/backend/internal/repo/users_ent.go new file mode 100644 index 0000000..0131ea7 --- /dev/null +++ b/backend/internal/repo/users_ent.go @@ -0,0 +1,141 @@ +package repo + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/ent" + "github.com/hay-kot/git-web-template/backend/ent/user" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type EntUserRepository struct { + db *ent.Client +} + +func (e *EntUserRepository) toUserOut(usr *types.UserOut, entUsr *ent.User) { + usr.ID = entUsr.ID + usr.Password = entUsr.Password + usr.Name = entUsr.Name + usr.Email = entUsr.Email + usr.IsSuperuser = entUsr.IsSuperuser +} + +func (e *EntUserRepository) GetOneId(ctx context.Context, id uuid.UUID) (types.UserOut, error) { + usr, err := e.db.User.Query().Where(user.ID(id)).Only(ctx) + + usrOut := types.UserOut{} + + if err != nil { + return usrOut, err + } + + 
e.toUserOut(&usrOut, usr) + + return usrOut, nil +} + +func (e *EntUserRepository) GetOneEmail(ctx context.Context, email string) (types.UserOut, error) { + usr, err := e.db.User.Query().Where(user.Email(email)).Only(ctx) + + usrOut := types.UserOut{} + + if err != nil { + return usrOut, err + } + + e.toUserOut(&usrOut, usr) + + return usrOut, nil +} + +func (e *EntUserRepository) GetAll(ctx context.Context) ([]types.UserOut, error) { + users, err := e.db.User.Query().All(ctx) + + if err != nil { + return nil, err + } + + var usrs []types.UserOut + + for _, usr := range users { + usrOut := types.UserOut{} + e.toUserOut(&usrOut, usr) + usrs = append(usrs, usrOut) + } + + return usrs, nil +} + +func (e *EntUserRepository) Create(ctx context.Context, usr types.UserCreate) (types.UserOut, error) { + err := usr.Validate() + usrOut := types.UserOut{} + + if err != nil { + return usrOut, err + } + + entUser, err := e.db.User. + Create(). + SetName(usr.Name). + SetEmail(usr.Email). + SetPassword(usr.Password). + SetIsSuperuser(usr.IsSuperuser). 
+ Save(ctx) + + e.toUserOut(&usrOut, entUser) + + return usrOut, err +} + +func (e *EntUserRepository) Update(ctx context.Context, ID uuid.UUID, data types.UserUpdate) error { + bldr := e.db.User.Update().Where(user.ID(ID)) + + if data.Name != nil { + bldr = bldr.SetName(*data.Name) + } + + if data.Email != nil { + bldr = bldr.SetEmail(*data.Email) + } + + // TODO: FUTURE + // if data.Password != nil { + // bldr = bldr.SetPassword(*data.Password) + // } + + // if data.IsSuperuser != nil { + // bldr = bldr.SetIsSuperuser(*data.IsSuperuser) + // } + + _, err := bldr.Save(ctx) + return err +} + +func (e *EntUserRepository) Delete(ctx context.Context, id uuid.UUID) error { + _, err := e.db.User.Delete().Where(user.ID(id)).Exec(ctx) + return err +} + +func (e *EntUserRepository) DeleteAll(ctx context.Context) error { + _, err := e.db.User.Delete().Exec(ctx) + return err +} + +func (e *EntUserRepository) GetSuperusers(ctx context.Context) ([]types.UserOut, error) { + users, err := e.db.User.Query().Where(user.IsSuperuser(true)).All(ctx) + + if err != nil { + return nil, err + } + + var usrs []types.UserOut + + for _, usr := range users { + usrOut := types.UserOut{} + e.toUserOut(&usrOut, usr) + usrs = append(usrs, usrOut) + } + + return usrs, nil +} diff --git a/backend/internal/repo/users_ent_test.go b/backend/internal/repo/users_ent_test.go new file mode 100644 index 0000000..01a228e --- /dev/null +++ b/backend/internal/repo/users_ent_test.go @@ -0,0 +1,148 @@ +package repo + +import ( + "context" + "fmt" + "testing" + + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/stretchr/testify/assert" +) + +func UserFactory() types.UserCreate { + f := faker.NewFaker() + return types.UserCreate{ + Name: f.RandomString(10), + Email: f.RandomEmail(), + Password: f.RandomString(10), + IsSuperuser: f.RandomBool(), + } +} + +func Test_EntUserRepo_GetOneEmail(t *testing.T) { + assert := assert.New(t) 
+ user := UserFactory() + ctx := context.Background() + + testRepos.Users.Create(ctx, user) + + foundUser, err := testRepos.Users.GetOneEmail(ctx, user.Email) + + assert.NotNil(foundUser) + assert.Nil(err) + assert.Equal(user.Email, foundUser.Email) + assert.Equal(user.Name, foundUser.Name) + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} + +func Test_EntUserRepo_GetOneId(t *testing.T) { + assert := assert.New(t) + user := UserFactory() + ctx := context.Background() + + userOut, _ := testRepos.Users.Create(ctx, user) + foundUser, err := testRepos.Users.GetOneId(ctx, userOut.ID) + + assert.NotNil(foundUser) + assert.Nil(err) + assert.Equal(user.Email, foundUser.Email) + assert.Equal(user.Name, foundUser.Name) + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} + +func Test_EntUserRepo_GetAll(t *testing.T) { + // Setup + toCreate := []types.UserCreate{ + UserFactory(), + UserFactory(), + UserFactory(), + UserFactory(), + } + + ctx := context.Background() + + created := []types.UserOut{} + + for _, usr := range toCreate { + usrOut, _ := testRepos.Users.Create(ctx, usr) + created = append(created, usrOut) + } + + // Validate + allUsers, err := testRepos.Users.GetAll(ctx) + + assert.Nil(t, err) + assert.Equal(t, len(created), len(allUsers)) + + for _, usr := range created { + fmt.Printf("%+v\n", usr) + assert.Contains(t, allUsers, usr) + } + + for _, usr := range created { + testRepos.Users.Delete(ctx, usr.ID) + } + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} + +func Test_EntUserRepo_Update(t *testing.T) { + t.Skip() +} + +func Test_EntUserRepo_Delete(t *testing.T) { + // Create 10 Users + for i := 0; i < 10; i++ { + user := UserFactory() + ctx := context.Background() + _, _ = testRepos.Users.Create(ctx, user) + } + + // Delete all + ctx := context.Background() + allUsers, _ := testRepos.Users.GetAll(ctx) + + assert.Greater(t, len(allUsers), 0) + testRepos.Users.DeleteAll(ctx) + + allUsers, _ = testRepos.Users.GetAll(ctx) + assert.Equal(t, len(allUsers), 0) + +} + 
+func Test_EntUserRepo_GetSuperusers(t *testing.T) { + // Create 10 Users + superuser := 0 + users := 0 + + for i := 0; i < 10; i++ { + user := UserFactory() + ctx := context.Background() + _, _ = testRepos.Users.Create(ctx, user) + + if user.IsSuperuser { + superuser++ + } else { + users++ + } + } + + // Delete all + ctx := context.Background() + + superUsers, err := testRepos.Users.GetSuperusers(ctx) + assert.NoError(t, err) + + for _, usr := range superUsers { + assert.True(t, usr.IsSuperuser) + } + + // Cleanup + testRepos.Users.DeleteAll(ctx) +} diff --git a/backend/internal/repo/users_interface.go b/backend/internal/repo/users_interface.go new file mode 100644 index 0000000..161850d --- /dev/null +++ b/backend/internal/repo/users_interface.go @@ -0,0 +1,27 @@ +package repo + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type UserRepository interface { + // GetOneId returns a user by id + GetOneId(ctx context.Context, ID uuid.UUID) (types.UserOut, error) + // GetOneEmail returns a user by email + GetOneEmail(ctx context.Context, email string) (types.UserOut, error) + // GetAll returns all users + GetAll(ctx context.Context) ([]types.UserOut, error) + // Get Super Users + GetSuperusers(ctx context.Context) ([]types.UserOut, error) + // Create creates a new user + Create(ctx context.Context, user types.UserCreate) (types.UserOut, error) + // Update updates a user + Update(ctx context.Context, ID uuid.UUID, user types.UserUpdate) error + // Delete deletes a user + Delete(ctx context.Context, ID uuid.UUID) error + + DeleteAll(ctx context.Context) error +} diff --git a/backend/internal/services/all.go b/backend/internal/services/all.go new file mode 100644 index 0000000..3d4273d --- /dev/null +++ b/backend/internal/services/all.go @@ -0,0 +1,15 @@ +package services + +import "github.com/hay-kot/git-web-template/backend/internal/repo" + +type AllServices struct { + User *UserService + Admin 
*AdminService +} + +func NewServices(repos *repo.AllRepos) *AllServices { + return &AllServices{ + User: &UserService{repos}, + Admin: &AdminService{repos}, + } +} diff --git a/backend/internal/services/contexts.go b/backend/internal/services/contexts.go new file mode 100644 index 0000000..d6a0968 --- /dev/null +++ b/backend/internal/services/contexts.go @@ -0,0 +1,40 @@ +package services + +import ( + "context" + + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type contextKeys struct { + name string +} + +var ( + ContextUser = &contextKeys{name: "User"} + ContextUserToken = &contextKeys{name: "UserToken"} +) + +// SetUserCtx is a helper function that sets the ContextUser and ContextUserToken +// values within the context of a web request (or any context). +func SetUserCtx(ctx context.Context, user *types.UserOut, token string) context.Context { + ctx = context.WithValue(ctx, ContextUser, user) + ctx = context.WithValue(ctx, ContextUserToken, token) + return ctx +} + +// UseUserCtx is a helper function that returns the user from the context. +func UseUserCtx(ctx context.Context) *types.UserOut { + if val := ctx.Value(ContextUser); val != nil { + return val.(*types.UserOut) + } + return nil +} + +// UseTokenCtx is a helper function that returns the user token from the context. 
+func UseTokenCtx(ctx context.Context) string { + if val := ctx.Value(ContextUserToken); val != nil { + return val.(string) + } + return "" +} diff --git a/backend/internal/services/contexts_test.go b/backend/internal/services/contexts_test.go new file mode 100644 index 0000000..9cae289 --- /dev/null +++ b/backend/internal/services/contexts_test.go @@ -0,0 +1,39 @@ +package services + +import ( + "context" + "testing" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/stretchr/testify/assert" +) + +func Test_SetAuthContext(t *testing.T) { + user := &types.UserOut{ + ID: uuid.New(), + } + + token := uuid.New().String() + + ctx := SetUserCtx(context.Background(), user, token) + + ctxUser := UseUserCtx(ctx) + + assert.NotNil(t, ctxUser) + assert.Equal(t, user.ID, ctxUser.ID) + + ctxUserToken := UseTokenCtx(ctx) + assert.NotEmpty(t, ctxUserToken) +} + +func Test_SetAuthContext_Nulls(t *testing.T) { + ctx := SetUserCtx(context.Background(), nil, "") + + ctxUser := UseUserCtx(ctx) + + assert.Nil(t, ctxUser) + + ctxUserToken := UseTokenCtx(ctx) + assert.Empty(t, ctxUserToken) +} diff --git a/backend/internal/services/service_admin.go b/backend/internal/services/service_admin.go new file mode 100644 index 0000000..3d60c18 --- /dev/null +++ b/backend/internal/services/service_admin.go @@ -0,0 +1,47 @@ +package services + +import ( + "context" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/types" +) + +type AdminService struct { + repos *repo.AllRepos +} + +func (svc *AdminService) Create(ctx context.Context, usr types.UserCreate) (types.UserOut, error) { + return svc.repos.Users.Create(ctx, usr) +} + +func (svc *AdminService) GetAll(ctx context.Context) ([]types.UserOut, error) { + return svc.repos.Users.GetAll(ctx) +} + +func (svc *AdminService) GetByID(ctx context.Context, id uuid.UUID) (types.UserOut, error) { + return 
svc.repos.Users.GetOneId(ctx, id) +} + +func (svc *AdminService) GetByEmail(ctx context.Context, email string) (types.UserOut, error) { + return svc.repos.Users.GetOneEmail(ctx, email) +} + +func (svc *AdminService) UpdateProperties(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (types.UserOut, error) { + err := svc.repos.Users.Update(ctx, ID, data) + + if err != nil { + return types.UserOut{}, err + } + + return svc.repos.Users.GetOneId(ctx, ID) +} + +func (svc *AdminService) Delete(ctx context.Context, id uuid.UUID) error { + return svc.repos.Users.Delete(ctx, id) +} + +func (svc *AdminService) DeleteAll(ctx context.Context) error { + return svc.repos.Users.DeleteAll(ctx) +} diff --git a/backend/internal/services/service_user.go b/backend/internal/services/service_user.go new file mode 100644 index 0000000..4dfbc74 --- /dev/null +++ b/backend/internal/services/service_user.go @@ -0,0 +1,84 @@ +package services + +import ( + "context" + "errors" + "time" + + "github.com/google/uuid" + "github.com/hay-kot/git-web-template/backend/internal/repo" + "github.com/hay-kot/git-web-template/backend/internal/types" + "github.com/hay-kot/git-web-template/backend/pkgs/hasher" +) + +var ( + oneWeek = time.Hour * 24 * 7 + ErrorInvalidLogin = errors.New("invalid username or password") + ErrorInvalidToken = errors.New("invalid token") + ErrorTokenIdMismatch = errors.New("token id mismatch") +) + +type UserService struct { + repos *repo.AllRepos +} + +// GetSelf returns the user that is currently logged in based of the token provided within +func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (types.UserOut, error) { + hash := hasher.HashToken(requestToken) + return svc.repos.AuthTokens.GetUserFromToken(ctx, hash) +} + +func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data types.UserUpdate) (types.UserOut, error) { + err := svc.repos.Users.Update(ctx, ID, data) + + if err != nil { + return types.UserOut{}, err + } + + return 
svc.repos.Users.GetOneId(ctx, ID) +} + +// ============================================================================ +// User Authentication + +func (svc *UserService) createToken(ctx context.Context, userId uuid.UUID) (types.UserAuthTokenDetail, error) { + newToken := hasher.GenerateToken() + + created, err := svc.repos.AuthTokens.CreateToken(ctx, types.UserAuthTokenCreate{ + UserID: userId, + TokenHash: newToken.Hash, + ExpiresAt: time.Now().Add(oneWeek), + }) + + return types.UserAuthTokenDetail{Raw: newToken.Raw, ExpiresAt: created.ExpiresAt}, err +} + +func (svc *UserService) Login(ctx context.Context, username, password string) (types.UserAuthTokenDetail, error) { + usr, err := svc.repos.Users.GetOneEmail(ctx, username) + + if err != nil || !hasher.CheckPasswordHash(password, usr.Password) { + return types.UserAuthTokenDetail{}, ErrorInvalidLogin + } + + return svc.createToken(ctx, usr.ID) +} + +func (svc *UserService) Logout(ctx context.Context, token string) error { + hash := hasher.HashToken(token) + err := svc.repos.AuthTokens.DeleteToken(ctx, hash) + return err +} + +func (svc *UserService) RenewToken(ctx context.Context, token string) (types.UserAuthTokenDetail, error) { + hash := hasher.HashToken(token) + + dbToken, err := svc.repos.AuthTokens.GetUserFromToken(ctx, hash) + + if err != nil { + return types.UserAuthTokenDetail{}, ErrorInvalidToken + } + + newToken, _ := svc.createToken(ctx, dbToken.ID) + + return newToken, nil +} diff --git a/backend/internal/types/about_types.go b/backend/internal/types/about_types.go new file mode 100644 index 0000000..7db4bd9 --- /dev/null +++ b/backend/internal/types/about_types.go @@ -0,0 +1,11 @@ +package types + +// ApiSummary +// +// @public +type ApiSummary struct { + Healthy bool `json:"health"` + Versions []string `json:"versions"` + Title string `json:"title"` + Message string `json:"message"` +} diff --git a/backend/internal/types/token_types.go b/backend/internal/types/token_types.go new file mode 100644 
index 0000000..56b0b49 --- /dev/null +++ b/backend/internal/types/token_types.go @@ -0,0 +1,39 @@ +package types + +import ( + "time" + + "github.com/google/uuid" +) + +type LoginForm struct { + Username string `json:"username"` + Password string `json:"password"` +} + +type TokenResponse struct { + BearerToken string `json:"token"` + ExpiresAt time.Time `json:"expiresAt"` +} + +type UserAuthTokenDetail struct { + Raw string `json:"raw"` + ExpiresAt time.Time `json:"expiresAt"` +} + +type UserAuthToken struct { + TokenHash []byte `json:"token"` + UserID uuid.UUID `json:"userId"` + ExpiresAt time.Time `json:"expiresAt"` + CreatedAt time.Time `json:"createdAt"` +} + +func (u UserAuthToken) IsExpired() bool { + return u.ExpiresAt.Before(time.Now()) +} + +type UserAuthTokenCreate struct { + TokenHash []byte `json:"token"` + UserID uuid.UUID `json:"userId"` + ExpiresAt time.Time `json:"expiresAt"` +} diff --git a/backend/internal/types/users_types.go b/backend/internal/types/users_types.go new file mode 100644 index 0000000..db1c404 --- /dev/null +++ b/backend/internal/types/users_types.go @@ -0,0 +1,58 @@ +package types + +import ( + "errors" + + "github.com/google/uuid" +) + +var ( + ErrNameEmpty = errors.New("name is empty") + ErrEmailEmpty = errors.New("email is empty") +) + +// UserIn is a basic user input struct containing only the fields that are +// required for user creation. +type UserIn struct { + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"password"` +} + +// UserCreate is the Data object contain the requirements of creating a user +// in the database. It should to create users from an API unless the user has +// rights to create SuperUsers. For regular user in data use the UserIn struct. 
+type UserCreate struct { + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"password"` + IsSuperuser bool `json:"isSuperuser"` +} + +func (u *UserCreate) Validate() error { + if u.Name == "" { + return ErrNameEmpty + } + if u.Email == "" { + return ErrEmailEmpty + } + return nil +} + +type UserOut struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + Password string `json:"-"` + IsSuperuser bool `json:"isSuperuser"` +} + +// IsNull is a proxy call for `usr.Id == uuid.Nil` +func (usr *UserOut) IsNull() bool { + return usr.ID == uuid.Nil +} + +type UserUpdate struct { + Name *string `json:"name"` + Email *string `json:"email"` +} diff --git a/backend/internal/types/users_types_test.go b/backend/internal/types/users_types_test.go new file mode 100644 index 0000000..bc3b825 --- /dev/null +++ b/backend/internal/types/users_types_test.go @@ -0,0 +1,76 @@ +package types + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestUserCreate_Validate(t *testing.T) { + type fields struct { + Name string + Email string + Password string + IsSuperuser bool + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "no_name", + fields: fields{ + Name: "", + Email: "", + Password: "", + IsSuperuser: false, + }, + wantErr: true, + }, + { + name: "no_email", + fields: fields{ + Name: "test", + Email: "", + Password: "", + IsSuperuser: false, + }, + wantErr: true, + }, + { + name: "valid", + fields: fields{ + Name: "test", + Email: "test@email.com", + Password: "mypassword", + IsSuperuser: false, + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + u := &UserCreate{ + Name: tt.fields.Name, + Email: tt.fields.Email, + Password: tt.fields.Password, + IsSuperuser: tt.fields.IsSuperuser, + } + if err := u.Validate(); (err != nil) != tt.wantErr { + t.Errorf("UserCreate.Validate() error = %v, 
wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestUserOut_IsNull(t *testing.T) { + nullUser := UserOut{} + + assert.True(t, nullUser.IsNull()) + + nullUser.ID = uuid.New() + + assert.False(t, nullUser.IsNull()) +} diff --git a/backend/pkgs/automapper/README.md b/backend/pkgs/automapper/README.md new file mode 100644 index 0000000..fc9b01b --- /dev/null +++ b/backend/pkgs/automapper/README.md @@ -0,0 +1,56 @@ +# Automapper + + +Automapper is an opinionated Go library that provides a dead simple interface to mapping 1-1 models To/From a database Model to a DTO or Schema using value semantics. It does not rely on code comments, but instead uses standard Go code to define your mapping and configuration to make it easy to use an refactor. + +Current Limitation +- flat/single level models +- single schema to model per config entry +- limited configuration (support lowercase, camelcase, snakecase, etc) + + +Future Considerations +- [ ] Recursive mapping of embed structs +- [ ] Optional generate time type checker. +- [ ] Ensure values are copied to the destination and not just a reference +- [ ] ?!?!? 
+ + +## Example Configuration + +```go +package main + +import ( + "github.com/mealie-recipes/mealie-analytics/ent" + "github.com/mealie-recipes/mealie-analytics/internal/types" + "github.com/mealie-recipes/mealie-analytics/pkgs/automapper" +) + +// getMappers serialized the config file into a list of automapper struct +func getMappers() []automapper.AutoMapper { + return []automapper.AutoMapper{ + { + Package: "mapper", // generated package name + Prefix: "analytics", // generating file prefix -> analytics_automapper.go + Name: "Mealie Analytics", // For console output + Schema: automapper.Schema{ + Type: types.Analytics{}, + Prefix: "types", // Package namespace + }, + Model: automapper.Model{ + Type: ent.Analytics{}, + Prefix: "ent", // Package namespace + }, + Imports: []string{}, // Specify additional imports here + }, + } +} + +func main() { + automappers := getMappers() + conf := automapper.DefaultConf() + + automapper.Generate(automappers, conf) +} +``` \ No newline at end of file diff --git a/backend/pkgs/automapper/automapper.go b/backend/pkgs/automapper/automapper.go new file mode 100644 index 0000000..f620c60 --- /dev/null +++ b/backend/pkgs/automapper/automapper.go @@ -0,0 +1,92 @@ +package automapper + +import ( + "bytes" + "fmt" + "go/format" + "os" + "reflect" + "strings" + "text/template" +) + +type FieldAssignment struct { + ModelField string + SchemaField string +} + +type Model struct { + Type interface{} + Prefix string + Fields []reflect.StructField + Reference string +} + +type Schema struct { + Name string + Type interface{} + Prefix string + Fields []reflect.StructField + Reference string +} + +type AutoMapper struct { + Name string + Package string + Prefix string + Schema Schema + Model Model + Imports []string + FieldAssignments []FieldAssignment +} + +func (mapper *AutoMapper) ExecuteTemplates(conf *AutoMapperConf) { + t := template.New("automapper") + t, err := t.Parse(automapperTemplate) + if err != nil { + fmt.Println(err) + } + + // 
Ensure the output directory exists + os.MkdirAll(conf.OutDir, 0755) + + var path = fmt.Sprintf("%s/%s", conf.OutDir, mapper.GetFileName()) + + f, err := os.Create(path) + if err != nil { + panic(err) + } + defer f.Close() + + var buf bytes.Buffer + + err = t.Execute(&buf, mapper) + if err != nil { + fmt.Println(err) + } + + text, err := format.Source(buf.Bytes()) + + if err != nil { + fmt.Println(err) + } + + f.Write(text) + +} + +// GetFileName returns the computed file name based off user preference. +// If the Prefix has been specified on the AutoMapper it will be used +// in place of the Struct name. If the Prefix is not specified, the +// Struct name will be used. +// +// Examples: +// prefix_automapper.go +// mystructname_automapper.go +func (mapper *AutoMapper) GetFileName() string { + if mapper.Prefix == "" { + return strings.ToLower(mapper.Schema.Reference) + "_" + "automapper.go" + } + return strings.ToLower(mapper.Prefix) + "_" + "automapper.go" + +} diff --git a/backend/pkgs/automapper/conf.go b/backend/pkgs/automapper/conf.go new file mode 100644 index 0000000..96f090a --- /dev/null +++ b/backend/pkgs/automapper/conf.go @@ -0,0 +1,11 @@ +package automapper + +type AutoMapperConf struct { + OutDir string +} + +func DefaultConf() *AutoMapperConf { + return &AutoMapperConf{ + OutDir: "internal/mapper", + } +} diff --git a/backend/pkgs/automapper/main.go b/backend/pkgs/automapper/main.go new file mode 100644 index 0000000..e3eba73 --- /dev/null +++ b/backend/pkgs/automapper/main.go @@ -0,0 +1,48 @@ +package automapper + +import ( + "fmt" + "reflect" + "strings" +) + +func Generate(automappers []AutoMapper, conf *AutoMapperConf) { + for _, mapper := range automappers { + modelType := reflect.TypeOf(mapper.Model.Type) + transferObjectType := reflect.TypeOf(mapper.Schema.Type) + + fmt.Printf("%s: %s -> %s\n", mapper.Name, modelType.Name(), transferObjectType.Name()) + + // From Fields + mapper.Imports = append(mapper.Imports, modelType.PkgPath()) + 
mapper.Model.Reference = modelType.Name() + mapper.Model.Fields = make([]reflect.StructField, 0) + for i := 0; i < modelType.NumField(); i++ { + mapper.Model.Fields = append(mapper.Model.Fields, modelType.Field(i)) + } + + // To Fields + mapper.Imports = append(mapper.Imports, transferObjectType.PkgPath()) + mapper.Schema.Reference = transferObjectType.Name() + mapper.Schema.Fields = make([]reflect.StructField, 0) + for i := 0; i < transferObjectType.NumField(); i++ { + mapper.Schema.Fields = append(mapper.Schema.Fields, transferObjectType.Field(i)) + } + + // Determine Field Assignments by matching the To fields and From fields by name + mapper.FieldAssignments = make([]FieldAssignment, 0) + + for _, toField := range mapper.Schema.Fields { + for _, fromField := range mapper.Model.Fields { + if strings.EqualFold(toField.Name, fromField.Name) { + mapper.FieldAssignments = append(mapper.FieldAssignments, FieldAssignment{ + ModelField: fromField.Name, + SchemaField: toField.Name, + }) + } + } + } + + mapper.ExecuteTemplates(conf) + } +} diff --git a/backend/pkgs/automapper/templates.go b/backend/pkgs/automapper/templates.go new file mode 100644 index 0000000..b001f61 --- /dev/null +++ b/backend/pkgs/automapper/templates.go @@ -0,0 +1,22 @@ +package automapper + +var automapperTemplate = `// Code generated by "/pkgs/automapper"; DO NOT EDIT. 
+package {{ .Package }} + +import ( + {{ range $import := .Imports }}"{{ $import }}" + {{ end }} +) + +func {{ .Schema.Reference }}FromModel(from {{ .Model.Prefix}}.{{ .Model.Reference }}) {{ .Schema.Prefix}}.{{ .Schema.Reference }} { + return {{ .Schema.Prefix}}.{{ .Schema.Reference }}{ {{ range $i, $f := .FieldAssignments }} + {{ $f.SchemaField }}: from.{{ $f.ModelField }},{{ end }} + } +} + +func {{ .Schema.Reference }}ToModel(from {{ .Schema.Prefix}}.{{ .Schema.Reference }}) {{ .Model.Prefix}}.{{ .Model.Reference }} { + return {{ .Model.Prefix}}.{{ .Model.Reference }}{ {{ range $i, $f := .FieldAssignments }} + {{ $f.ModelField }}: from.{{ $f.SchemaField }},{{ end }} + } +} +` diff --git a/backend/pkgs/faker/random.go b/backend/pkgs/faker/random.go new file mode 100644 index 0000000..42ef538 --- /dev/null +++ b/backend/pkgs/faker/random.go @@ -0,0 +1,37 @@ +package faker + +import ( + "math/rand" + "time" +) + +var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + +type Faker struct { +} + +func NewFaker() *Faker { + rand.Seed(time.Now().UnixNano()) + return &Faker{} +} + +func (f *Faker) RandomString(length int) string { + + b := make([]rune, length) + for i := range b { + b[i] = letters[rand.Intn(len(letters))] + } + return string(b) +} + +func (f *Faker) RandomEmail() string { + return f.RandomString(10) + "@email.com" +} + +func (f *Faker) RandomBool() bool { + return rand.Intn(2) == 1 +} + +func (f *Faker) RandomNumber(min, max int) int { + return rand.Intn(max-min) + min +} diff --git a/backend/pkgs/faker/randoms_test.go b/backend/pkgs/faker/randoms_test.go new file mode 100644 index 0000000..79747c2 --- /dev/null +++ b/backend/pkgs/faker/randoms_test.go @@ -0,0 +1,95 @@ +package faker + +import ( + "testing" +) + +const Loops = 500 + +func ValidateUnique(values []string) bool { + for i := 0; i < len(values); i++ { + for j := i + 1; j < len(values); j++ { + if values[i] == values[j] { + return false + } + } + } + return true +} + 
+func Test_GetRandomString(t *testing.T) { + t.Parallel() + // Test that the function returns a string of the correct length + var generated = make([]string, Loops) + + faker := NewFaker() + + for i := 0; i < Loops; i++ { + generated[i] = faker.RandomString(10) + } + + if !ValidateUnique(generated) { + t.Error("Generated values are not unique") + } +} + +func Test_GetRandomEmail(t *testing.T) { + t.Parallel() + // Test that the function returns a string of the correct length + var generated = make([]string, Loops) + + faker := NewFaker() + + for i := 0; i < Loops; i++ { + generated[i] = faker.RandomEmail() + } + + if !ValidateUnique(generated) { + t.Error("Generated values are not unique") + } +} + +func Test_GetRandomBool(t *testing.T) { + t.Parallel() + + var trues = 0 + var falses = 0 + + faker := NewFaker() + + for i := 0; i < Loops; i++ { + if faker.RandomBool() { + trues++ + } else { + falses++ + } + } + + if trues == 0 || falses == 0 { + t.Error("Generated boolean don't appear random") + } +} + +func Test_RandomNumber(t *testing.T) { + t.Parallel() + + f := NewFaker() + + const MIN = 0 + const MAX = 100 + + last := MIN - 1 + + for i := 0; i < Loops; i++ { + n := f.RandomNumber(MIN, MAX) + + if n == last { + t.Errorf("RandomNumber() failed to generate unique number") + } + + if n < MIN || n > MAX { + t.Errorf("RandomNumber() failed to generate a number between %v and %v", MIN, MAX) + } + } + +} diff --git a/backend/pkgs/hasher/password.go b/backend/pkgs/hasher/password.go new file mode 100644 index 0000000..f7cca4d --- /dev/null +++ b/backend/pkgs/hasher/password.go @@ -0,0 +1,13 @@ +package hasher + +import "golang.org/x/crypto/bcrypt" + +func HashPassword(password string) (string, error) { + bytes, err := bcrypt.GenerateFromPassword([]byte(password), 14) + return string(bytes), err +} + +func CheckPasswordHash(password, hash string) bool { + err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)) + return err == nil +} diff --git 
a/backend/pkgs/hasher/password_test.go b/backend/pkgs/hasher/password_test.go new file mode 100644 index 0000000..6f9128e --- /dev/null +++ b/backend/pkgs/hasher/password_test.go @@ -0,0 +1,40 @@ +package hasher + +import "testing" + +func TestHashPassword(t *testing.T) { + t.Parallel() + type args struct { + password string + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "letters_and_numbers", + args: args{ + password: "password123456788", + }, + }, + { + name: "letters_number_and_special", + args: args{ + password: "!2afj3214pofajip3142j;fa", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := HashPassword(tt.args.password) + if (err != nil) != tt.wantErr { + t.Errorf("HashPassword() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !CheckPasswordHash(tt.args.password, got) { + t.Errorf("CheckPasswordHash() failed to validate password=%v against hash=%v", tt.args.password, got) + } + }) + } +} diff --git a/backend/pkgs/hasher/token.go b/backend/pkgs/hasher/token.go new file mode 100644 index 0000000..117eeca --- /dev/null +++ b/backend/pkgs/hasher/token.go @@ -0,0 +1,30 @@ +package hasher + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/base32" +) + +type Token struct { + Raw string + Hash []byte +} + +func GenerateToken() Token { + randomBytes := make([]byte, 16) + rand.Read(randomBytes) + + plainText := base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(randomBytes) + hash := HashToken(plainText) + + return Token{ + Raw: plainText, + Hash: hash, + } +} + +func HashToken(plainTextToken string) []byte { + hash := sha256.Sum256([]byte(plainTextToken)) + return hash[:] +} diff --git a/backend/pkgs/hasher/token_test.go b/backend/pkgs/hasher/token_test.go new file mode 100644 index 0000000..d61fddf --- /dev/null +++ b/backend/pkgs/hasher/token_test.go @@ -0,0 +1,44 @@ +package hasher + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + 
+const ITERATIONS = 200 + +func Test_NewToken(t *testing.T) { + t.Parallel() + tokens := make([]Token, ITERATIONS) + for i := 0; i < ITERATIONS; i++ { + tokens[i] = GenerateToken() + } + + // Check if they are unique + for i := 0; i < 5; i++ { + for j := i + 1; j < 5; j++ { + if tokens[i].Raw == tokens[j].Raw { + t.Errorf("NewToken() failed to generate unique tokens") + } + } + } +} + +func Test_HashToken_CheckTokenHash(t *testing.T) { + t.Parallel() + for i := 0; i < ITERATIONS; i++ { + token := GenerateToken() + + // Check raw text is reltively random + for j := 0; j < 5; j++ { + assert.NotEqual(t, token.Raw, GenerateToken().Raw) + } + + // Check token length is less than 32 characters + assert.Less(t, len(token.Raw), 32) + + // Check hash is the same + assert.Equal(t, token.Hash, HashToken(token.Raw)) + } +} diff --git a/backend/pkgs/logger/struct_logger.go b/backend/pkgs/logger/struct_logger.go new file mode 100644 index 0000000..2007ebb --- /dev/null +++ b/backend/pkgs/logger/struct_logger.go @@ -0,0 +1,121 @@ +package logger + +import ( + "encoding/json" + "io" + "os" + "runtime/debug" + "sync" + "time" +) + +type Level int8 + +const ( + LevelDebug Level = iota + LevelInfo + LevelError + LevelFatal + LevelOff +) + +func (l Level) String() string { + switch l { + case LevelDebug: + return "DEBUG" + case LevelInfo: + return "INFO" + case LevelError: + return "ERROR" + case LevelFatal: + return "FATAL" + default: + return "" + } +} + +type Props map[string]string + +type Logger struct { + out io.Writer + minLevel Level + mu sync.Mutex +} + +func New(out io.Writer, minLevel Level) *Logger { + return &Logger{ + out: out, + minLevel: minLevel, + } +} + +func (l *Logger) Debug(message string, properties map[string]string) { + l.print(LevelDebug, message, properties) +} + +func (l *Logger) Info(message string, properties map[string]string) { + l.print(LevelInfo, message, properties) +} + +func (l *Logger) Error(err error, properties map[string]string) { + 
l.print(LevelError, err.Error(), properties) +} + +func (l *Logger) Fatal(err error, properties map[string]string) { + l.print(LevelFatal, err.Error(), properties) + os.Exit(1) // For entries at the FATAL level, we also terminate the application. +} + +func (l *Logger) print(level Level, message string, properties map[string]string) (int, error) { + // If the severity level of the log entry is below the minimum severity for the + // logger, then return with no further action. + if level < l.minLevel { + return 0, nil + } + + // Declare an anonymous struct holding the data for the log entry. + aux := struct { + Level string `json:"level"` + Time string `json:"time"` + Message string `json:"message"` + Properties map[string]string `json:"properties,omitempty"` + Trace string `json:"trace,omitempty"` + }{ + Level: level.String(), + Time: time.Now().UTC().Format(time.RFC3339), + Message: message, + Properties: properties, + } + + // Include a stack trace for entries at the ERROR and FATAL levels. + if level >= LevelError { + aux.Trace = string(debug.Stack()) + } + + // Declare a line variable for holding the actual log entry text. + var line []byte + + // Marshal the anonymous struct to JSON and store it in the line variable. If there + // was a problem creating the JSON, set the contents of the log entry to be that + // plain-text error message instead.” + line, err := json.Marshal(aux) + if err != nil { + line = []byte(LevelError.String() + ": unable to marshal log message:" + err.Error()) + } + + // Lock the mutex so that no two writes to the output destination cannot happen + // concurrently. If we don't do this, it's possible that the text for two or more + // log entries will be intermingled in the output. + l.mu.Lock() + defer l.mu.Unlock() + + // Write the log entry followed by a newline. + return l.out.Write(append(line, '\n')) +} + +// We also implement a Write() method on our Logger type so that it satisfies the +// io.Writer interface. 
This writes a log entry at the ERROR level with no additional +// properties. +func (l *Logger) Write(message []byte) (n int, err error) { + return l.print(LevelError, string(message), nil) +} diff --git a/backend/pkgs/logger/struct_logger_test.go b/backend/pkgs/logger/struct_logger_test.go new file mode 100644 index 0000000..9b8526d --- /dev/null +++ b/backend/pkgs/logger/struct_logger_test.go @@ -0,0 +1,119 @@ +package logger + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +var lastWrite = []byte{} + +type testLogRecorder struct { + t *testing.T +} + +func (tlr testLogRecorder) Write(p []byte) (n int, err error) { + lastWrite = p + return len(p), nil +} + +type logEntry struct { + Level string `json:"level"` + Message string `json:"message"` + Props *Props `json:"properties"` +} + +func (lr *logEntry) Unmarshal(t *testing.T, jbytes []byte) { + err := json.Unmarshal(jbytes, lr) + if err != nil { + t.Error(err) + } +} + +func Test_LevelString(t *testing.T) { + assert.Equal(t, "DEBUG", LevelDebug.String()) + assert.Equal(t, "INFO", LevelInfo.String()) + assert.Equal(t, "ERROR", LevelError.String()) + assert.Equal(t, "FATAL", LevelFatal.String()) + assert.Equal(t, "", LevelOff.String()) +} + +func Test_NewLogger(t *testing.T) { + logRecorder := testLogRecorder{t: t} + + logger := New(logRecorder, LevelInfo) + assert.NotNil(t, logger) +} + +func getTestLogger(t *testing.T, level Level) *Logger { + logRecorder := testLogRecorder{t: t} + + logger := New(logRecorder, level) + assert.NotNil(t, logger) + + return logger +} + +func checkLastEntry(t *testing.T, level Level, message string, props *Props) { + entry := &logEntry{} + entry.Unmarshal(t, lastWrite) + + assert.Equal(t, level.String(), entry.Level) + assert.Equal(t, message, entry.Message) + assert.Equal(t, props, entry.Props) + +} + +func Test_LoggerDebug(t *testing.T) { + lgr := getTestLogger(t, LevelDebug) + + lgr.Debug("Test Debug", Props{"Hello": "World"}) + 
checkLastEntry(t, LevelDebug, "Test Debug", &Props{"Hello": "World"}) + + lastWrite = []byte{} +} + +func Test_LoggerInfo(t *testing.T) { + lgr := getTestLogger(t, LevelInfo) + + lgr.Info("Test Info", Props{"Hello": "World"}) + checkLastEntry(t, LevelInfo, "Test Info", &Props{"Hello": "World"}) + lastWrite = []byte{} + +} + +func Test_LoggerError(t *testing.T) { + lgr := getTestLogger(t, LevelError) + + myerror := errors.New("Test Error") + + lgr.Error(myerror, Props{"Hello": "World"}) + checkLastEntry(t, LevelError, "Test Error", &Props{"Hello": "World"}) + lastWrite = []byte{} + +} + +func Test_LoggerLevelScale(t *testing.T) { + lgr := getTestLogger(t, LevelInfo) + lastWrite = []byte{} + lgr.Debug("Test Debug", Props{"Hello": "World"}) + + assert.Equal(t, []byte{}, lastWrite) + + lgr = getTestLogger(t, LevelError) + lastWrite = []byte{} + lgr.Info("Test Debug", Props{"Hello": "World"}) + lgr.Debug("Test Debug", Props{"Hello": "World"}) + + assert.Equal(t, []byte{}, lastWrite) + + lgr = getTestLogger(t, LevelFatal) + + lgr.Info("Test Debug", Props{"Hello": "World"}) + lgr.Debug("Test Debug", Props{"Hello": "World"}) + lgr.Error(errors.New("Test Error"), Props{"Hello": "World"}) + + assert.Equal(t, []byte{}, lastWrite) +} diff --git a/backend/pkgs/mailer/mailer.go b/backend/pkgs/mailer/mailer.go new file mode 100644 index 0000000..22609aa --- /dev/null +++ b/backend/pkgs/mailer/mailer.go @@ -0,0 +1,51 @@ +package mailer + +import ( + "encoding/base64" + "fmt" + "mime" + "net/smtp" + "strconv" +) + +type Mailer struct { + Host string `json:"host,omitempty"` + Port int `json:"port,omitempty"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + From string `json:"from,omitempty"` +} + +func (m *Mailer) Ready() bool { + return m.Host != "" && m.Port != 0 && m.Username != "" && m.Password != "" && m.From != "" +} + +func (m *Mailer) server() string { + return m.Host + ":" + strconv.Itoa(m.Port) +} + +func (m *Mailer) Send(msg 
*Message) error { + server := m.server() + + header := make(map[string]string) + header["From"] = msg.From.String() + header["To"] = msg.To.String() + header["Subject"] = mime.QEncoding.Encode("UTF-8", msg.Subject) + header["MIME-Version"] = "1.0" + header["Content-Type"] = "text/html; charset=\"utf-8\"" + header["Content-Transfer-Encoding"] = "base64" + + message := "" + for k, v := range header { + message += fmt.Sprintf("%s: %s\r\n", k, v) + } + message += "\r\n" + base64.StdEncoding.EncodeToString([]byte(msg.Body)) + + return smtp.SendMail( + server, + smtp.PlainAuth("", m.Username, m.Password, m.Host), + m.From, + []string{msg.To.Address}, + []byte(message), + ) +} diff --git a/backend/pkgs/mailer/mailer_test.go b/backend/pkgs/mailer/mailer_test.go new file mode 100644 index 0000000..f1b71db --- /dev/null +++ b/backend/pkgs/mailer/mailer_test.go @@ -0,0 +1,66 @@ +package mailer + +import ( + "encoding/json" + "io/ioutil" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + TestMailerConfig = "test-mailer.json" +) + +func GetTestMailer() (*Mailer, error) { + // Read JSON File + bytes, err := ioutil.ReadFile(TestMailerConfig) + + mailer := &Mailer{} + + if err != nil { + return nil, err + } + + // Unmarshal JSON + err = json.Unmarshal(bytes, mailer) + + if err != nil { + return nil, err + } + + return mailer, nil + +} + +func Test_Mailer(t *testing.T) { + t.Parallel() + + mailer, err := GetTestMailer() + + if err != nil { + t.Skip("Error Reading Test Mailer Config - Skipping") + } + + if !mailer.Ready() { + t.Skip("Mailer not ready - Skipping") + } + + message, err := RenderWelcome() + + if err != nil { + t.Error(err) + } + + mb := NewMessageBuilder(). + SetBody(message). + SetSubject("Hello"). + SetTo("John Doe", "john@doe.com"). 
+ SetFrom("Jane Doe", "jane@doe.com") + + msg := mb.Build() + + err = mailer.Send(msg) + + assert.Nil(t, err) +} diff --git a/backend/pkgs/mailer/message.go b/backend/pkgs/mailer/message.go new file mode 100644 index 0000000..e0552b3 --- /dev/null +++ b/backend/pkgs/mailer/message.go @@ -0,0 +1,56 @@ +package mailer + +import "net/mail" + +type Message struct { + Subject string + To mail.Address + From mail.Address + Body string +} + +type MessageBuilder struct { + subject string + to mail.Address + from mail.Address + body string +} + +func NewMessageBuilder() *MessageBuilder { + return &MessageBuilder{} +} + +func (mb *MessageBuilder) Build() *Message { + return &Message{ + Subject: mb.subject, + To: mb.to, + From: mb.from, + Body: mb.body, + } +} + +func (mb *MessageBuilder) SetSubject(subject string) *MessageBuilder { + mb.subject = subject + return mb +} + +func (mb *MessageBuilder) SetTo(name, to string) *MessageBuilder { + mb.to = mail.Address{ + Name: name, + Address: to, + } + return mb +} + +func (mb *MessageBuilder) SetFrom(name, from string) *MessageBuilder { + mb.from = mail.Address{ + Name: name, + Address: from, + } + return mb +} + +func (mb *MessageBuilder) SetBody(body string) *MessageBuilder { + mb.body = body + return mb +} diff --git a/backend/pkgs/mailer/message_test.go b/backend/pkgs/mailer/message_test.go new file mode 100644 index 0000000..ca465b7 --- /dev/null +++ b/backend/pkgs/mailer/message_test.go @@ -0,0 +1,26 @@ +package mailer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_MessageBuilder(t *testing.T) { + t.Parallel() + + mb := NewMessageBuilder(). + SetBody("Hello World!"). + SetSubject("Hello"). + SetTo("John Doe", "john@doe.com"). 
+ SetFrom("Jane Doe", "jane@doe.com") + + msg := mb.Build() + + assert.Equal(t, "Hello", msg.Subject) + assert.Equal(t, "Hello World!", msg.Body) + assert.Equal(t, "John Doe", msg.To.Name) + assert.Equal(t, "john@doe.com", msg.To.Address) + assert.Equal(t, "Jane Doe", msg.From.Name) + assert.Equal(t, "jane@doe.com", msg.From.Address) +} diff --git a/backend/pkgs/mailer/templates.go b/backend/pkgs/mailer/templates.go new file mode 100644 index 0000000..b7984c0 --- /dev/null +++ b/backend/pkgs/mailer/templates.go @@ -0,0 +1,62 @@ +package mailer + +import ( + "bytes" + _ "embed" + "html/template" +) + +//go:embed templates/welcome.html +var templatesWelcome string + +type TemplateDefaults struct { + CompanyName string + CompanyAddress string + CompanyURL string + ActivateAccountURL string + UnsubscribeURL string +} + +type TemplateProps struct { + Defaults TemplateDefaults + Data map[string]string +} + +func (tp *TemplateProps) Set(key, value string) { + tp.Data[key] = value +} + +func DefaultTemplateData() TemplateProps { + return TemplateProps{ + Defaults: TemplateDefaults{ + CompanyName: "Haybytes.com", + CompanyAddress: "123 Main St, Anytown, CA 12345", + CompanyURL: "https://haybytes.com", + ActivateAccountURL: "https://google.com", + UnsubscribeURL: "https://google.com", + }, + Data: make(map[string]string), + } +} + +func render(tpl string, data TemplateProps) (string, error) { + tmpl, err := template.New("name").Parse(tpl) + + if err != nil { + return "", err + } + + var tplBuffer bytes.Buffer + + err = tmpl.Execute(&tplBuffer, data) + + if err != nil { + return "", err + } + + return tplBuffer.String(), nil +} + +func RenderWelcome() (string, error) { + return render(templatesWelcome, DefaultTemplateData()) +} diff --git a/backend/pkgs/mailer/templates/welcome.html b/backend/pkgs/mailer/templates/welcome.html new file mode 100644 index 0000000..ed8d9c8 --- /dev/null +++ b/backend/pkgs/mailer/templates/welcome.html @@ -0,0 +1,444 @@ + + + + + + Welcome! 
+ + + + This is preheader text. Some clients will show this text as a + preview. + + + + + + + + + diff --git a/backend/pkgs/mailer/test-mailer-template.json b/backend/pkgs/mailer/test-mailer-template.json new file mode 100644 index 0000000..9ff353e --- /dev/null +++ b/backend/pkgs/mailer/test-mailer-template.json @@ -0,0 +1,7 @@ +{ + "host": "", + "port": 465, + "username": "", + "password": "", + "from": "" +} \ No newline at end of file diff --git a/backend/pkgs/server/constants.go b/backend/pkgs/server/constants.go new file mode 100644 index 0000000..1d07ef5 --- /dev/null +++ b/backend/pkgs/server/constants.go @@ -0,0 +1,7 @@ +package server + +const ( + ContentType = "Content-Type" + ContentJSON = "application/json" + ContentXML = "application/xml" +) diff --git a/backend/pkgs/server/request.go b/backend/pkgs/server/request.go new file mode 100644 index 0000000..c4b30a4 --- /dev/null +++ b/backend/pkgs/server/request.go @@ -0,0 +1,48 @@ +package server + +import ( + "encoding/json" + "net/http" +) + +// Decode reads the body of an HTTP request looking for a JSON document. The +// body is decoded into the provided value. 
+func Decode(r *http.Request, val interface{}) error { + decoder := json.NewDecoder(r.Body) + decoder.DisallowUnknownFields() + if err := decoder.Decode(val); err != nil { + return err + } + return nil +} + +// GetId is a shotcut to get the id from the request URL or return a default value +func GetParam(r *http.Request, key, d string) string { + val := r.URL.Query().Get(key) + + if val == "" { + return d + } + + return val +} + +// GetSkip is a shotcut to get the skip from the request URL parameters +func GetSkip(r *http.Request, d string) string { + return GetParam(r, "skip", d) +} + +// GetSkip is a shotcut to get the skip from the request URL parameters +func GetId(r *http.Request, d string) string { + return GetParam(r, "id", d) +} + +// GetLimit is a shotcut to get the limit from the request URL parameters +func GetLimit(r *http.Request, d string) string { + return GetParam(r, "limit", d) +} + +// GetQuery is a shotcut to get the sort from the request URL parameters +func GetQuery(r *http.Request, d string) string { + return GetParam(r, "query", d) +} diff --git a/backend/pkgs/server/request_test.go b/backend/pkgs/server/request_test.go new file mode 100644 index 0000000..05dc8c5 --- /dev/null +++ b/backend/pkgs/server/request_test.go @@ -0,0 +1,210 @@ +package server + +import ( + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +type TestStruct struct { + Name string `json:"name"` + Data string `json:"data"` +} + +func TestDecode(t *testing.T) { + type args struct { + r *http.Request + val interface{} + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "check_error", + args: args{ + r: &http.Request{ + Body: http.NoBody, + }, + val: make(map[string]interface{}), + }, + wantErr: true, + }, + { + name: "check_success", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + val: TestStruct{ + Name: "test", + Data: "test", + }, + }, + wantErr: false, + }, + } + for 
_, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := Decode(tt.args.r, &tt.args.val); (err != nil) != tt.wantErr { + t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestGetParam(t *testing.T) { + type args struct { + r *http.Request + key string + d string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + key: "id", + d: "default", + }, + want: "default", + }, + { + name: "check_id", + args: args{ + r: httptest.NewRequest("POST", "/item?id=123", strings.NewReader(`{"name":"test","data":"test"}`)), + key: "id", + d: "", + }, + want: "123", + }, + { + name: "check_query", + args: args{ + r: httptest.NewRequest("POST", "/item?query=hello-world", strings.NewReader(`{"name":"test","data":"test"}`)), + key: "query", + d: "", + }, + want: "hello-world", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetParam(tt.args.r, tt.args.key, tt.args.d); got != tt.want { + t.Errorf("GetParam() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestGetSkip(t *testing.T) { + type args struct { + r *http.Request + d string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "0", + }, + { + name: "check_skip", + args: args{ + r: httptest.NewRequest("POST", "/item?skip=107", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "107", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetSkip(tt.args.r, tt.args.d); got != tt.want { + t.Errorf("GetSkip() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestGetLimit(t *testing.T) { + type args struct { + r *http.Request + d string + } + tests := 
[]struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "0", + }, + { + name: "check_limit", + args: args{ + r: httptest.NewRequest("POST", "/item?limit=107", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "107", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetLimit(tt.args.r, tt.args.d); got != tt.want { + t.Errorf("GetLimit() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestGetQuery(t *testing.T) { + type args struct { + r *http.Request + d string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "check_default", + args: args{ + r: httptest.NewRequest("POST", "/", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "0", + }, + { + name: "check_query", + args: args{ + r: httptest.NewRequest("POST", "/item?query=hello-query", strings.NewReader(`{"name":"test","data":"test"}`)), + d: "0", + }, + want: "hello-query", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := GetQuery(tt.args.r, tt.args.d); got != tt.want { + t.Errorf("GetQuery() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/backend/pkgs/server/response.go b/backend/pkgs/server/response.go new file mode 100644 index 0000000..d4d008f --- /dev/null +++ b/backend/pkgs/server/response.go @@ -0,0 +1,61 @@ +package server + +import ( + "encoding/json" + "errors" + "net/http" +) + +// Respond converts a Go value to JSON and sends it to the client. +// Adapted from https://github.com/ardanlabs/service/tree/master/foundation/web +func Respond(w http.ResponseWriter, statusCode int, data interface{}) error { + // If there is nothing to marshal then set status code and return. 
+ if statusCode == http.StatusNoContent { + w.WriteHeader(statusCode) + return nil + } + + // Convert the response value to JSON. + jsonData, err := json.Marshal(data) + if err != nil { + return err + } + + // Set the content type and headers once we know marshaling has succeeded. + w.Header().Set("Content-Type", "application/json") + + // Write the status code to the response. + w.WriteHeader(statusCode) + + // Send the result back to the client. + if _, err := w.Write(jsonData); err != nil { + return err + } + + return nil +} + +// ResponseError is a helper function that sends a JSON response of an error message +func RespondError(w http.ResponseWriter, statusCode int, err error) { + eb := ErrorBuilder{} + eb.AddError(err) + eb.Respond(w, statusCode) +} + +// RespondInternalServerError is a wrapper around RespondError that sends a 500 internal server error. Useful for +// Sending generic errors when everything went wrong. +func RespondInternalServerError(w http.ResponseWriter) { + RespondError(w, http.StatusInternalServerError, errors.New("internal server error")) +} + +// RespondNotFound is a helper utility for responding with a generic +// "unauthorized" error. +func RespondUnauthorized(w http.ResponseWriter) { + RespondError(w, http.StatusUnauthorized, errors.New("unauthorized")) +} + +// RespondForbidden is a helper utility for responding with a generic +// "forbidden" error. +func RespondForbidden(w http.ResponseWriter) { + RespondError(w, http.StatusForbidden, errors.New("forbidden")) +} diff --git a/backend/pkgs/server/response_error_builder.go b/backend/pkgs/server/response_error_builder.go new file mode 100644 index 0000000..ac8d34d --- /dev/null +++ b/backend/pkgs/server/response_error_builder.go @@ -0,0 +1,51 @@ +package server + +import ( + "net/http" +) + +// ErrorBuilder is a helper type to build a response that contains an array of errors. +// Typical use cases are for returning an array of validation errors back to the user. 
+// +// Example: +// +// +// { +// "errors": [ +// "invalid id", +// "invalid name", +// "invalid description" +// ], +// "message": "Unprocessable Entity", +// "status": 422 +// } +// +type ErrorBuilder struct { + errs []string +} + +// HasErrors returns true if the ErrorBuilder has any errors. +func (eb *ErrorBuilder) HasErrors() bool { + if (eb.errs == nil) || (len(eb.errs) == 0) { + return false + } + return true +} + +// AddError adds an error to the ErrorBuilder if an error is not nil. If the +// Error is nil, then nothing is added. +func (eb *ErrorBuilder) AddError(err error) { + if err != nil { + if eb.errs == nil { + eb.errs = make([]string, 0) + } + + eb.errs = append(eb.errs, err.Error()) + } +} + +// Respond sends a JSON response with the ErrorBuilder's errors. If there are no errors, then +// the errors field will be an empty array. +func (eb *ErrorBuilder) Respond(w http.ResponseWriter, statusCode int) { + Respond(w, statusCode, Wrap(nil).AddError(http.StatusText(statusCode), eb.errs)) +} diff --git a/backend/pkgs/server/response_error_builder_test.go b/backend/pkgs/server/response_error_builder_test.go new file mode 100644 index 0000000..012e744 --- /dev/null +++ b/backend/pkgs/server/response_error_builder_test.go @@ -0,0 +1,107 @@ +package server + +import ( + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "testing" + + "github.com/hay-kot/git-web-template/backend/pkgs/faker" + "github.com/stretchr/testify/assert" +) + +func Test_ErrorBuilder_HasErrors_NilList(t *testing.T) { + t.Parallel() + + var ebNilList = ErrorBuilder{} + assert.False(t, ebNilList.HasErrors(), "ErrorBuilder.HasErrors() should return false when list is nil") + +} + +func Test_ErrorBuilder_HasErrors_EmptyList(t *testing.T) { + t.Parallel() + + var ebEmptyList = ErrorBuilder{ + errs: []string{}, + } + assert.False(t, ebEmptyList.HasErrors(), "ErrorBuilder.HasErrors() should return false when list is empty") + +} + +func Test_ErrorBuilder_HasErrors_WithError(t 
*testing.T) { + t.Parallel() + + var ebList = ErrorBuilder{} + ebList.AddError(errors.New("test error")) + + assert.True(t, ebList.HasErrors(), "ErrorBuilder.HasErrors() should return true when list is not empty") + +} + +func Test_ErrorBuilder_AddError(t *testing.T) { + t.Parallel() + + randomError := make([]error, 10) + + f := faker.NewFaker() + + errorStrings := make([]string, 10) + + for i := 0; i < 10; i++ { + err := errors.New(f.RandomString(10)) + randomError[i] = err + errorStrings[i] = err.Error() + } + + // Check Results + var ebList = ErrorBuilder{} + + for _, err := range randomError { + ebList.AddError(err) + } + + assert.Equal(t, errorStrings, ebList.errs, "ErrorBuilder.AddError() should add an error to the list") +} + +func Test_ErrorBuilder_Respond(t *testing.T) { + t.Parallel() + + f := faker.NewFaker() + + randomError := make([]error, 5) + + for i := 0; i < 5; i++ { + err := errors.New(f.RandomString(5)) + randomError[i] = err + } + + // Check Results + var ebList = ErrorBuilder{} + + for _, err := range randomError { + ebList.AddError(err) + } + + fakeWriter := httptest.NewRecorder() + + ebList.Respond(fakeWriter, 422) + + assert.Equal(t, 422, fakeWriter.Code, "ErrorBuilder.Respond() should return a status code of 422") + + // Check errors payload is correct + + errorsStruct := struct { + Errors []string `json:"details"` + Message string `json:"message"` + Error bool `json:"error"` + }{ + Errors: ebList.errs, + Message: http.StatusText(http.StatusUnprocessableEntity), + Error: true, + } + + asJson, _ := json.Marshal(errorsStruct) + assert.JSONEq(t, string(asJson), fakeWriter.Body.String(), "ErrorBuilder.Respond() should return a JSON response with the errors") + +} diff --git a/backend/pkgs/server/response_test.go b/backend/pkgs/server/response_test.go new file mode 100644 index 0000000..2e98365 --- /dev/null +++ b/backend/pkgs/server/response_test.go @@ -0,0 +1,78 @@ +package server + +import ( + "errors" + "net/http" + "net/http/httptest" + 
"testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Respond_NoContent(t *testing.T) { + recorder := httptest.NewRecorder() + dummystruct := struct { + Name string + }{ + Name: "dummy", + } + + Respond(recorder, http.StatusNoContent, dummystruct) + + assert.Equal(t, http.StatusNoContent, recorder.Code) + assert.Empty(t, recorder.Body.String()) +} + +func Test_Respond_JSON(t *testing.T) { + recorder := httptest.NewRecorder() + dummystruct := struct { + Name string `json:"name"` + }{ + Name: "dummy", + } + + Respond(recorder, http.StatusCreated, dummystruct) + + assert.Equal(t, http.StatusCreated, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"name":"dummy"}`) + assert.Equal(t, "application/json", recorder.Header().Get("Content-Type")) + +} + +func Test_RespondError(t *testing.T) { + recorder := httptest.NewRecorder() + var customError = errors.New("custom error") + + RespondError(recorder, http.StatusBadRequest, customError) + + assert.Equal(t, http.StatusBadRequest, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"details":["custom error"], "message":"Bad Request", "error":true}`) + +} +func Test_RespondInternalServerError(t *testing.T) { + recorder := httptest.NewRecorder() + + RespondInternalServerError(recorder) + + assert.Equal(t, http.StatusInternalServerError, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"details":["internal server error"], "message":"Internal Server Error", "error":true}`) + +} +func Test_RespondUnauthorized(t *testing.T) { + recorder := httptest.NewRecorder() + + RespondUnauthorized(recorder) + + assert.Equal(t, http.StatusUnauthorized, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), `{"details":["unauthorized"], "message":"Unauthorized", "error":true}`) + +} +func Test_RespondForbidden(t *testing.T) { + recorder := httptest.NewRecorder() + + RespondForbidden(recorder) + + assert.Equal(t, http.StatusForbidden, recorder.Code) + assert.JSONEq(t, recorder.Body.String(), 
`{"details":["forbidden"], "message":"Forbidden", "error":true}`) + +} diff --git a/backend/pkgs/server/result.go b/backend/pkgs/server/result.go new file mode 100644 index 0000000..c2340a5 --- /dev/null +++ b/backend/pkgs/server/result.go @@ -0,0 +1,27 @@ +package server + +type Result struct { + Error bool `json:"error,omitempty"` + Details interface{} `json:"details,omitempty"` + Message string `json:"message,omitempty"` + Item interface{} `json:"item,omitempty"` +} + +// Wrap creates a Wrapper instance and adds the initial namespace and data to be returned. +func Wrap(data interface{}) Result { + return Result{ + Item: data, + } +} + +func (r Result) AddMessage(message string) Result { + r.Message = message + return r +} + +func (r Result) AddError(err string, details interface{}) Result { + r.Message = err + r.Details = details + r.Error = true + return r +} diff --git a/backend/pkgs/server/server.go b/backend/pkgs/server/server.go new file mode 100644 index 0000000..628f234 --- /dev/null +++ b/backend/pkgs/server/server.go @@ -0,0 +1,123 @@ +package server + +import ( + "context" + "errors" + "fmt" + "net/http" + "os" + "os/signal" + "sync" + "syscall" + "time" +) + +// TODO: #2 Implement Go routine pool/job queue + +var ErrServerNotStarted = errors.New("server not started") +var ErrServerAlreadyStarted = errors.New("server already started") + +type Server struct { + Host string + Port string + + Worker Worker + wg sync.WaitGroup + + started bool + activeServer *http.Server +} + +func NewServer(host, port string) *Server { + return &Server{ + Host: host, + Port: port, + wg: sync.WaitGroup{}, + Worker: NewSimpleWorker(), + } +} + +func (s *Server) Shutdown(sig string) error { + if !s.started { + return ErrServerNotStarted + } + fmt.Printf("Received %s signal, shutting down\n", sig) + + // Create a context with a 5-second timeout. 
+ ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + err := s.activeServer.Shutdown(ctx) + s.started = false + if err != nil { + return err + } + + fmt.Println("Http server shutdown, waiting for all tasks to finish") + s.wg.Wait() + + return nil + +} + +func (s *Server) Start(router http.Handler) error { + if s.started { + return ErrServerAlreadyStarted + } + + s.activeServer = &http.Server{ + Addr: s.Host + ":" + s.Port, + Handler: router, + IdleTimeout: time.Minute, + ReadTimeout: 10 * time.Second, + WriteTimeout: 10 * time.Second, + } + + shutdownError := make(chan error) + + go func() { + // Create a quit channel which carries os.Signal values. + quit := make(chan os.Signal, 1) + + // Use signal.Notify() to listen for incoming SIGINT and SIGTERM signals and + // relay them to the quit channel. + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + + // Read the signal from the quit channel. block until received + sig := <-quit + + err := s.Shutdown(sig.String()) + if err != nil { + shutdownError <- err + } + + // Exit the application with a 0 (success) status code. + os.Exit(0) + }() + + s.started = true + err := s.activeServer.ListenAndServe() + + if !errors.Is(err, http.ErrServerClosed) { + return err + } + + err = <-shutdownError + if err != nil { + return err + } + + fmt.Println("Server shutdown successfully") + + return nil +} + +// Background starts a go routine that runs on the servers pool. In the event of a shutdown +// request, the server will wait until all open goroutines have finished before shutting down. 
+func (svr *Server) Background(task func()) { + svr.wg.Add(1) + svr.Worker.Add(func() { + defer svr.wg.Done() + task() + }) +} diff --git a/backend/pkgs/server/server_test.go b/backend/pkgs/server/server_test.go new file mode 100644 index 0000000..18eed9e --- /dev/null +++ b/backend/pkgs/server/server_test.go @@ -0,0 +1,97 @@ +package server + +import ( + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func testServer(t *testing.T, r http.Handler) *Server { + svr := NewServer("127.0.0.1", "19245") + + go func() { + svr.Start(r) + }() + + ping := func() error { + _, err := http.Get("http://127.0.0.1:19245") + return err + } + + for { + if err := ping(); err == nil { + break + } + time.Sleep(time.Millisecond * 100) + } + + return svr +} + +func Test_ServerShutdown_Error(t *testing.T) { + svr := NewServer("127.0.0.1", "19245") + + err := svr.Shutdown("test") + assert.ErrorIs(t, err, ErrServerNotStarted) +} + +func Test_ServerStarts_Error(t *testing.T) { + svr := testServer(t, nil) + + err := svr.Start(nil) + assert.ErrorIs(t, err, ErrServerAlreadyStarted) + + err = svr.Shutdown("test") + assert.NoError(t, err) +} + +func Test_ServerStarts(t *testing.T) { + svr := testServer(t, nil) + err := svr.Shutdown("test") + assert.NoError(t, err) +} + +func Test_GracefulServerShutdownWithWorkers(t *testing.T) { + isFinished := false + + svr := testServer(t, nil) + + svr.Background(func() { + time.Sleep(time.Second * 4) + isFinished = true + }) + + err := svr.Shutdown("test") + + assert.NoError(t, err) + assert.True(t, isFinished) + +} + +func Test_GracefulServerShutdownWithRequests(t *testing.T) { + isFinished := false + + router := http.NewServeMux() + + // add long running handler func + router.HandleFunc("/test", func(rw http.ResponseWriter, r *http.Request) { + time.Sleep(time.Second * 3) + isFinished = true + }) + + svr := testServer(t, router) + + // Make request to "/test" + go func() { + http.Get("http://127.0.0.1:19245/test") // This is 
probably bad? + }() + + time.Sleep(time.Second) // Hack to wait for the request to be made + + err := svr.Shutdown("test") + assert.NoError(t, err) + + assert.True(t, isFinished) +} diff --git a/backend/pkgs/server/worker.go b/backend/pkgs/server/worker.go new file mode 100644 index 0000000..682d5d6 --- /dev/null +++ b/backend/pkgs/server/worker.go @@ -0,0 +1,20 @@ +package server + +type Worker interface { + Add(func()) +} + +// SimpleWorker is a simple background worker that implements +// the Worker interface and runs all tasks in a go routine without +// a pool or que or limits. It's useful for simple or small applications +// with minimal/short background tasks +type SimpleWorker struct { +} + +func NewSimpleWorker() *SimpleWorker { + return &SimpleWorker{} +} + +func (sw *SimpleWorker) Add(task func()) { + go task() +} diff --git a/backend/static/favicon.ico b/backend/static/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..c6f7f740621c2f01f10b5c6614ed1960a7e66244 GIT binary patch literal 1366 zcmZ`(c~DbF9DV{R9n_B1QhLm2I*Jz}NjyS8AhD#7LLLwjfk7mMaDyN@6!8d$RICDm zk`zQDI$kXj2q9FICT2=Uz)DdGhQmNWLKvw?5&|Ue^~LFo(?7cN?f!Ouzwi5Yc6Mh= z!@;1{E?>C-0I-@897sbV+TK5QM8?1y=em&KltQAC0HC>ir8NO%FD3@lr~q($GXMyR z0bmJ93G@KK!vKJR2>?DK0C3B>azA1p0664@htkOifx%$lL7I0M-45^r@F0C}7#$D@ z2=+V!0YWBFBfY`*phhCg4}2<}5c*ylMkmYd}{nJ3hwYCe+=Rd zTLR)!d>L_!%Ii@--Sv-4BC;}gBa1BQW3Z?)Z^+d8H#D*UPcFj!CRl8Mj*dasXP`gk z;a(#worMHCi1X4!zj*EN(`nAoLQemiG}RD=U&tApkGxPvE34yZ%*8sJVF^ZBj}b1= zL0SuJw!t;i5MK+WbPhz6iel=UlRNtPudIBn?d1Y&vB4#i5dC5USWuDqcwDH5OeRzO z!PxDa-Sdv}=a!+WDX72yFQT6>cIS>3iR95cI8`x7&N`MT>lRg4yRG|FsZjhd2{q3` zk>O#R(Cded%h`Q1H6o$vk&;0RcmHO?h{>AWXSl3~%w{v2#q{#ncKT%gTPu8V3L1Fv zg1n!E--T3Y@dTu|p9p%`-MbOJVf)rC>d_IS z4HoI4{BcOP2tPH!vO8kcdF~ljB!#dmbl;we^q7`biRvBvcph$=g-+|BykU#t0$*N! 
zf;YO%thsZd>XN+vlJaKFqg%C-D^AJ2@8J`0T7#V^cOW zm7U2-Pfku}WwKMnO)(Zw?l&^N*u;=- z%*|C664$UZYMm1%-3B!!ZiXH#=Jp*oSD~Qz`<`x(?Y6tbUPF!M{_&xb?^aVQ0%XS8 zeWZpbo}QkKS#QwEK_MY2!p7Rpz20c9v7?CC;73F~L@`k(Jw0vfGPf9J3SITnLsW-S z@G6d{;r{j|`gL!NbgEY-d!}~u=u^vObvmf2d&_P~ex3FHrhN3$A+%p4Muwse;PZ z`<8_iyeX}^1=jm>%uXPxt!)aKmTMCta>mAg4>{&4E{s5Nk-sGWoa3R~hIt}#d+uCv zG?$&gP4Y?1O+o^I!(a)XSez#YPsd?>uo$1+SPu-w2ZNd7Y{>YJAS;`lmYnzhg6f^O Q)QA9}kio!v` { + item: T; +} + +interface Status { + status: string; + message: string; +} + +export interface ApiSummary { + health: boolean; + versions: string[]; + title: string; + message: string; +} + +export interface ApiToken { + token: string; + expiresAt: string; +} + +export interface UserSelf { + id: string; + name: string; + email: string; + isSuperuser: boolean; +} + +export class v1ApiClient { + version: string; + baseUrl: string; + requests: Axios; + + token: string; + expires: Date; + + constructor(baseUrl: string, version = "v1") { + this.version = version; + this.baseUrl = baseUrl; + this.requests = axios.create({ + baseURL: `${this.baseUrl}/${this.version}`, + }); + } + + v1(url: string) { + return `${this.baseUrl}/api/v1${url}`; + } + + api(url: string) { + return `${this.baseUrl}/api${url}`; + } + + setToken(token: string, expires: Date) { + this.token = token; + this.expires = expires; + + this.requests.defaults.headers.common["Authorization"] = token; + } + + async login(username: string, password: string) { + const response = await this.requests.post( + this.v1("/users/login"), + { + username, + password, + } + ); + + this.setToken(response.data.token, new Date(response.data.expiresAt)); + + return response; + } + + async logout() { + const response = await this.requests.post(this.v1("/users/logout")); + + if (response.status === 200) { + this.setToken("", new Date()); + } + + return response; + } + + async self() { + return this.requests.get>(this.v1("/users/self")); + } + + async status() { + return 
this.requests.get>(this.api("/status")); + } +} diff --git a/client/package-lock.json b/client/package-lock.json new file mode 100644 index 0000000..a5b40df --- /dev/null +++ b/client/package-lock.json @@ -0,0 +1,3024 @@ +{ + "name": "client", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "dependencies": { + "axios": "^0.25.0" + }, + "devDependencies": { + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.0", + "@types/node": "^17.0.14", + "typescript": "^4.5.5", + "vitest": "^0.2.5" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.16.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": 
true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": 
{ + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@jest/types": { + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@types/chai": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==", + "dev": true + }, + "node_modules/@types/chai-subset": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", + "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "dev": true, + "dependencies": { + "@types/chai": "*" + } + }, + "node_modules/@types/concat-stream": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@types/concat-stream/-/concat-stream-1.6.1.tgz", + "integrity": "sha512-eHE4cQPoj6ngxBZMvVf6Hw7Mh4jMW4U9lpGmS5GBPB9RYxlFg+CHaVN7ErNY4W9XfLIEn20b4VDYaIrbq0q4uA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/expect": { + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/expect/-/expect-24.3.0.tgz", + "integrity": "sha512-aq5Z+YFBz5o2b6Sp1jigx5nsmoZMK5Ceurjwy6PZmRv7dEi1jLtkARfvB1ME+OXJUG+7TZUDcv3WoCr/aor6dQ==", + "deprecated": "This is a stub types definition. 
expect provides its own type definitions, so you do not need this installed.", + "dev": true, + "dependencies": { + "expect": "*" + } + }, + "node_modules/@types/form-data": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-0.0.33.tgz", + "integrity": "sha1-yayFsqX9GENbjIXZ7LUObWyJP/g=", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/mocha": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.0.tgz", + "integrity": "sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg==", + "dev": true + }, + "node_modules/@types/node": { + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", + "dev": true + }, 
+ "node_modules/@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "20.2.1", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-20.2.1.tgz", + "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", + "dev": true + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "dependencies": { + "follow-redirects": "^1.14.7" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": 
"^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "engines": [ + "node >= 0.8" + ], + "optional": true, + "peer": true, + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": 
true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/diff-sequences": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.4.0.tgz", + "integrity": "sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/esbuild": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.13.15.tgz", + "integrity": "sha512-raCxt02HBKv8RJxE8vkTSCXGIyKHdEdGfUmiYb8wnabnaEmHzyW7DCHb5tEN0xU8ryqg5xw54mcwnYkC4x3AIw==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "optionalDependencies": { + "esbuild-android-arm64": "0.13.15", + "esbuild-darwin-64": "0.13.15", + "esbuild-darwin-arm64": "0.13.15", + "esbuild-freebsd-64": "0.13.15", + "esbuild-freebsd-arm64": "0.13.15", + "esbuild-linux-32": "0.13.15", + "esbuild-linux-64": "0.13.15", + "esbuild-linux-arm": "0.13.15", + "esbuild-linux-arm64": "0.13.15", + "esbuild-linux-mips64le": "0.13.15", + "esbuild-linux-ppc64le": "0.13.15", + "esbuild-netbsd-64": "0.13.15", + "esbuild-openbsd-64": "0.13.15", + "esbuild-sunos-64": "0.13.15", + "esbuild-windows-32": "0.13.15", + "esbuild-windows-64": "0.13.15", + "esbuild-windows-arm64": "0.13.15" + } + }, + "node_modules/esbuild-android-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.13.15.tgz", + "integrity": "sha512-m602nft/XXeO8YQPUDVoHfjyRVPdPgjyyXOxZ44MK/agewFFkPa8tUo6lAzSWh5Ui5PB4KR9UIFTSBKh/RrCmg==", + 
"cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/esbuild-darwin-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.13.15.tgz", + "integrity": "sha512-ihOQRGs2yyp7t5bArCwnvn2Atr6X4axqPpEdCFPVp7iUj4cVSdisgvEKdNR7yH3JDjW6aQDw40iQFoTqejqxvQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/esbuild-darwin-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.13.15.tgz", + "integrity": "sha512-i1FZssTVxUqNlJ6cBTj5YQj4imWy3m49RZRnHhLpefFIh0To05ow9DTrXROTE1urGTQCloFUXTX8QfGJy1P8dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/esbuild-freebsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.13.15.tgz", + "integrity": "sha512-G3dLBXUI6lC6Z09/x+WtXBXbOYQZ0E8TDBqvn7aMaOCzryJs8LyVXKY4CPnHFXZAbSwkCbqiPuSQ1+HhrNk7EA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/esbuild-freebsd-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.13.15.tgz", + "integrity": "sha512-KJx0fzEDf1uhNOZQStV4ujg30WlnwqUASaGSFPhznLM/bbheu9HhqZ6mJJZM32lkyfGJikw0jg7v3S0oAvtvQQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/esbuild-linux-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.13.15.tgz", + "integrity": "sha512-ZvTBPk0YWCLMCXiFmD5EUtB30zIPvC5Itxz0mdTu/xZBbbHJftQgLWY49wEPSn2T/TxahYCRDWun5smRa0Tu+g==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-64": { + "version": "0.13.15", + "resolved": 
"https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.13.15.tgz", + "integrity": "sha512-eCKzkNSLywNeQTRBxJRQ0jxRCl2YWdMB3+PkWFo2BBQYC5mISLIVIjThNtn6HUNqua1pnvgP5xX0nHbZbPj5oA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-arm": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.13.15.tgz", + "integrity": "sha512-wUHttDi/ol0tD8ZgUMDH8Ef7IbDX+/UsWJOXaAyTdkT7Yy9ZBqPg8bgB/Dn3CZ9SBpNieozrPRHm0BGww7W/jA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.13.15.tgz", + "integrity": "sha512-bYpuUlN6qYU9slzr/ltyLTR9YTBS7qUDymO8SV7kjeNext61OdmqFAzuVZom+OLW1HPHseBfJ/JfdSlx8oTUoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-mips64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.13.15.tgz", + "integrity": "sha512-KlVjIG828uFPyJkO/8gKwy9RbXhCEUeFsCGOJBepUlpa7G8/SeZgncUEz/tOOUJTcWMTmFMtdd3GElGyAtbSWg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-linux-ppc64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.13.15.tgz", + "integrity": "sha512-h6gYF+OsaqEuBjeesTBtUPw0bmiDu7eAeuc2OEH9S6mV9/jPhPdhOWzdeshb0BskRZxPhxPOjqZ+/OqLcxQwEQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/esbuild-netbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.13.15.tgz", + "integrity": 
"sha512-3+yE9emwoevLMyvu+iR3rsa+Xwhie7ZEHMGDQ6dkqP/ndFzRHkobHUKTe+NCApSqG5ce2z4rFu+NX/UHnxlh3w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ] + }, + "node_modules/esbuild-openbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.13.15.tgz", + "integrity": "sha512-wTfvtwYJYAFL1fSs8yHIdf5GEE4NkbtbXtjLWjM3Cw8mmQKqsg8kTiqJ9NJQe5NX/5Qlo7Xd9r1yKMMkHllp5g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/esbuild-sunos-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.13.15.tgz", + "integrity": "sha512-lbivT9Bx3t1iWWrSnGyBP9ODriEvWDRiweAs69vI+miJoeKwHWOComSRukttbuzjZ8r1q0mQJ8Z7yUsDJ3hKdw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ] + }, + "node_modules/esbuild-windows-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.13.15.tgz", + "integrity": "sha512-fDMEf2g3SsJ599MBr50cY5ve5lP1wyVwTe6aLJsM01KtxyKkB4UT+fc5MXQFn3RLrAIAZOG+tHC+yXObpSn7Nw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/esbuild-windows-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.13.15.tgz", + "integrity": "sha512-9aMsPRGDWCd3bGjUIKG/ZOJPKsiztlxl/Q3C1XDswO6eNX/Jtwu4M+jb6YDH9hRSUflQWX0XKAfWzgy5Wk54JQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/esbuild-windows-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.13.15.tgz", + "integrity": "sha512-zzvyCVVpbwQQATaf3IG8mu1IwGEiDxKkYUdA4FpoCHi1KtPa13jeScYDjlW0Qh+ebWzpKfR2ZwvqAQkSWNcKjA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + 
}, + "node_modules/expect": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", + "integrity": "sha512-1M/0kAALIaj5LaG66sFJTbRsWTADnylly82cu4bspI0nl+pgP4E6Bh/aqdHlTUjul06K7xQnnrAoqfxVU0+/ag==", + "dev": true, + "dependencies": { + "@jest/types": "^27.4.2", + "jest-get-type": "^27.4.0", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/follow-redirects": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz", + "integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": 
"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-port": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-3.2.0.tgz", + "integrity": "sha1-3Xzn3hh8Bsi/NTeWrHHgmfCYDrw=", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "node_modules/happy-dom": { + "version": "2.31.1", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-2.31.1.tgz", + "integrity": 
"sha512-hbTLxMqyluLT06nRN4TDGLjjKni73tZlvLdF6qGfdv5U4EnrSYSwcZK3ESmv0LEEa5St7NY7e62rhISotH8O3Q==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "he": "^1.1.1", + "node-fetch": "^2.6.1", + "sync-request": "^6.1.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0" + } + }, + "node_modules/happy-dom/node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": 
true, + "optional": true, + "peer": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/http-basic": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/http-basic/-/http-basic-8.1.3.tgz", + "integrity": "sha512-/EcDMwJZh3mABI2NhGfHOGOeOZITqfkEO4p/xK+l3NpyncIHUQBoMvCSF/b5GqvKtySC2srL/GGG3+EtlqlmCw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "caseless": "^0.12.0", + "concat-stream": "^1.6.2", + "http-response-object": "^3.0.1", + "parse-cache-control": "^1.0.1" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/http-response-object": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/http-response-object/-/http-response-object-3.0.2.tgz", + "integrity": "sha512-bqX0XTF6fnXSQcEJ2Iuyr75yVakyjIDCqroJQ/aHfSdlM743Cwqoi2nDYMzLGWUcuTWGWy8AAvOKXTfiv6q9RA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "^10.0.3" + } + }, + "node_modules/http-response-object/node_modules/@types/node": { + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "optional": true, + "peer": true + }, + 
"node_modules/is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/jest-diff": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.4.6.tgz", + "integrity": "sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.4.0", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.4.0.tgz", + "integrity": "sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.4.6.tgz", + "integrity": 
"sha512-XD4PKT3Wn1LQnRAq7ZsTI0VRuEc9OrCPFiO1XL7bftTGmfNF0DcEwMHRgqiu7NGf8ZoZDREpGrCniDkjt79WbA==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.4.6", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.4.6.tgz", + "integrity": "sha512-0p5szriFU0U74czRSFjH6RyS7UYIAkn/ntwMuOwTGWrQIOh5NzXXrq72LOqIkJKKvFbPq+byZKuBz78fjBERBA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.4.2", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.4", + "pretty-format": "^27.4.6", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/local-pkg": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.1.tgz", + "integrity": "sha512-lL87ytIGP2FU5PWwNDo0w3WhIo2gopIAxPg9RxDYF7m4rr5ahuZxP22xnJHIvaLTe4Z9P6uKKY2UHiwyB4pcrw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/loupe": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.2.tgz", + "integrity": "sha512-QgVamnvj0jX1LMPlCAq0MK6hATORFtGqHoUKXTkwNe13BqlN6aePQCKnnTcFvdDYEEITcJ+gBl4mTW7YJtJbyQ==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.0" + } + }, + "node_modules/micromatch": { + "version": "4.0.4", + "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "mime-db": "1.51.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/nanoid": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", + "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/object-inspect": { + "version": "1.12.0", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "dev": true, + "optional": true, + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/parse-cache-control": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-cache-control/-/parse-cache-control-1.0.1.tgz", + "integrity": "sha1-juqz5U+laSD+Fro493+iGqzC104=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.4.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.6.tgz", + "integrity": "sha512-OovjwIzs9Te46vlEx7+uXB0PLijpwjXGKXjVGGPIGubGpq7uh5Xgf6D6FiJ/SzJMBosHDp6a2hiXOS97iBXcaA==", + 
"dev": true, + "dependencies": { + "nanoid": "^3.2.0", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + } + }, + "node_modules/pretty-format": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", + "integrity": "sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/promise": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-8.1.0.tgz", + "integrity": "sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "asap": "~2.0.6" + } + }, + "node_modules/qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": 
"sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, + "node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rollup": { + "version": "2.67.0", + "resolved": 
"https://registry.npmjs.org/rollup/-/rollup-2.67.0.tgz", + "integrity": "sha512-W83AaERwvDiHwHEF/dfAfS3z1Be5wf7n+pO3ZAO5IQadCT2lBTr7WQ2MwZZe+nodbD+n3HtC4OCOAdsOPPcKZQ==", + "dev": true, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=10.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stack-utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", + "dev": true, + "dependencies": 
{ + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sync-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/sync-request/-/sync-request-6.1.0.tgz", + "integrity": "sha512-8fjNkrNlNCrVc/av+Jn+xxqfCjYaBoHqCsDz6mt030UMxJGr+GSfCV1dQt2gRtlL63+VPidwDVLr7V2OcTSdRw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "http-response-object": "^3.0.1", + "sync-rpc": "^1.2.1", + "then-request": "^6.0.0" + }, + "engines": { + 
"node": ">=8.0.0" + } + }, + "node_modules/sync-rpc": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/sync-rpc/-/sync-rpc-1.3.6.tgz", + "integrity": "sha512-J8jTXuZzRlvU7HemDgHi3pGnh/rkoqR/OZSjhTyyZrEkkYQbk7Z33AXp37mkPfPpfdOuj7Ex3H/TJM1z48uPQw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "get-port": "^3.1.0" + } + }, + "node_modules/then-request": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/then-request/-/then-request-6.0.2.tgz", + "integrity": "sha512-3ZBiG7JvP3wbDzA9iNY5zJQcHL4jn/0BWtXIkagfz7QgOL/LqjCEOBQuJNZfu0XYnv5JhKh+cDxCPM4ILrqruA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/concat-stream": "^1.6.0", + "@types/form-data": "0.0.33", + "@types/node": "^8.0.0", + "@types/qs": "^6.2.31", + "caseless": "~0.12.0", + "concat-stream": "^1.6.0", + "form-data": "^2.2.0", + "http-basic": "^8.1.1", + "http-response-object": "^3.0.1", + "promise": "^8.0.0", + "qs": "^6.4.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/then-request/node_modules/@types/node": { + "version": "8.10.66", + "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", + "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/tinypool": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.1.1.tgz", + "integrity": "sha512-sW2fQZ2BRb/GX5v55NkHiTrbMLx0eX0xNpP+VGhOe2f7Oo04+LeClDyM19zCE/WCy7jJ8kzIJ0Ojrxj3UhN9Sg==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-0.2.10.tgz", + "integrity": "sha512-Qij6rGWCDjWIejxCXXVi6bNgvrYBp3PbqC4cBP/0fD6WHDOHCw09Zd13CsxrDqSR5PFq01WeqDws8t5lz5sH0A==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/to-regex-range": { 
+ "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/typescript": { + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.5.tgz", + "integrity": "sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/vite": { + "version": "2.7.13", + "resolved": "https://registry.npmjs.org/vite/-/vite-2.7.13.tgz", + "integrity": "sha512-Mq8et7f3aK0SgSxjDNfOAimZGW9XryfHRa/uV0jseQSilg+KhYDSoNb9h1rknOy6SuMkvNDLKCYAYYUMCE+IgQ==", + "dev": true, + "dependencies": { + "esbuild": "^0.13.12", + 
"postcss": "^8.4.5", + "resolve": "^1.20.0", + "rollup": "^2.59.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": ">=12.2.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "peerDependencies": { + "less": "*", + "sass": "*", + "stylus": "*" + }, + "peerDependenciesMeta": { + "less": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.2.5.tgz", + "integrity": "sha512-QruEhsNxy8ycLxYG9rrGUfHZzJ8A6YvA9ULZ4w/ecvm0Zejm1nxUar/XkRWkL2xzrqA5AjmfqDSQZ8q2bFbA0Q==", + "dev": true, + "dependencies": { + "@types/chai": "^4.3.0", + "@types/chai-subset": "^1.3.3", + "chai": "^4.3.6", + "local-pkg": "^0.4.1", + "tinypool": "^0.1.1", + "tinyspy": "^0.2.10", + "vite": ">=2.7.13" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": ">=14.14.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vitest/ui": "*", + "c8": "*", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@vitest/ui": { + "optional": true + }, + "c8": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "iconv-lite": "0.4.24" + } + }, + "node_modules/whatwg-mimetype": { + "version": "2.3.0", 
+ "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true, + "optional": true, + "peer": true + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + } + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", + "dev": true, + "requires": { + "@babel/highlight": "^7.16.7" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", + "dev": true + }, + "@babel/highlight": { + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.16.7", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "@jest/types": { + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", + "dev": true, + "requires": { + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + "chalk": "^4.0.0" + } + }, + "@types/chai": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==", + "dev": true + }, + "@types/chai-subset": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.3.tgz", + "integrity": "sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==", + "dev": true, + "requires": { + "@types/chai": "*" + } + }, + "@types/concat-stream": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@types/concat-stream/-/concat-stream-1.6.1.tgz", + "integrity": "sha512-eHE4cQPoj6ngxBZMvVf6Hw7Mh4jMW4U9lpGmS5GBPB9RYxlFg+CHaVN7ErNY4W9XfLIEn20b4VDYaIrbq0q4uA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/node": "*" + } + }, + "@types/expect": { + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/expect/-/expect-24.3.0.tgz", + "integrity": "sha512-aq5Z+YFBz5o2b6Sp1jigx5nsmoZMK5Ceurjwy6PZmRv7dEi1jLtkARfvB1ME+OXJUG+7TZUDcv3WoCr/aor6dQ==", + "dev": true, + "requires": { + "expect": "*" + } + }, + "@types/form-data": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-0.0.33.tgz", + "integrity": "sha1-yayFsqX9GENbjIXZ7LUObWyJP/g=", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/node": "*" + } + }, + "@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + 
"dev": true, + "requires": { + "@types/istanbul-lib-coverage": "*" + } + }, + "@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "requires": { + "@types/istanbul-lib-report": "*" + } + }, + "@types/mocha": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.0.tgz", + "integrity": "sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg==", + "dev": true + }, + "@types/node": { + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", + "dev": true + }, + "@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==", + "dev": true, + "optional": true, + "peer": true + }, + "@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "@types/yargs": { + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } + }, + "@types/yargs-parser": { + "version": "20.2.1", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-20.2.1.tgz", + "integrity": 
"sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=", + "dev": true, + "optional": true, + "peer": true + }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true, + "optional": true, + "peer": true + }, + "axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "requires": { + "follow-redirects": "^1.14.7" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "buffer-from": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "optional": true, + "peer": true + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true, + "optional": true, + "peer": true + }, + "chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "check-error": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true, + "optional": true, + "peer": true + }, + "deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": 
true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true, + "optional": true, + "peer": true + }, + "diff-sequences": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.4.0.tgz", + "integrity": "sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==", + "dev": true + }, + "esbuild": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.13.15.tgz", + "integrity": "sha512-raCxt02HBKv8RJxE8vkTSCXGIyKHdEdGfUmiYb8wnabnaEmHzyW7DCHb5tEN0xU8ryqg5xw54mcwnYkC4x3AIw==", + "dev": true, + "requires": { + "esbuild-android-arm64": "0.13.15", + "esbuild-darwin-64": "0.13.15", + "esbuild-darwin-arm64": "0.13.15", + "esbuild-freebsd-64": "0.13.15", + "esbuild-freebsd-arm64": "0.13.15", + "esbuild-linux-32": "0.13.15", + "esbuild-linux-64": "0.13.15", + "esbuild-linux-arm": "0.13.15", + "esbuild-linux-arm64": "0.13.15", + "esbuild-linux-mips64le": "0.13.15", + "esbuild-linux-ppc64le": "0.13.15", + "esbuild-netbsd-64": "0.13.15", + "esbuild-openbsd-64": "0.13.15", + "esbuild-sunos-64": "0.13.15", + "esbuild-windows-32": "0.13.15", + "esbuild-windows-64": "0.13.15", + "esbuild-windows-arm64": "0.13.15" + } + }, + "esbuild-android-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.13.15.tgz", + "integrity": "sha512-m602nft/XXeO8YQPUDVoHfjyRVPdPgjyyXOxZ44MK/agewFFkPa8tUo6lAzSWh5Ui5PB4KR9UIFTSBKh/RrCmg==", + "dev": true, + "optional": true + }, + "esbuild-darwin-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.13.15.tgz", + "integrity": "sha512-ihOQRGs2yyp7t5bArCwnvn2Atr6X4axqPpEdCFPVp7iUj4cVSdisgvEKdNR7yH3JDjW6aQDw40iQFoTqejqxvQ==", + 
"dev": true, + "optional": true + }, + "esbuild-darwin-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.13.15.tgz", + "integrity": "sha512-i1FZssTVxUqNlJ6cBTj5YQj4imWy3m49RZRnHhLpefFIh0To05ow9DTrXROTE1urGTQCloFUXTX8QfGJy1P8dQ==", + "dev": true, + "optional": true + }, + "esbuild-freebsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.13.15.tgz", + "integrity": "sha512-G3dLBXUI6lC6Z09/x+WtXBXbOYQZ0E8TDBqvn7aMaOCzryJs8LyVXKY4CPnHFXZAbSwkCbqiPuSQ1+HhrNk7EA==", + "dev": true, + "optional": true + }, + "esbuild-freebsd-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.13.15.tgz", + "integrity": "sha512-KJx0fzEDf1uhNOZQStV4ujg30WlnwqUASaGSFPhznLM/bbheu9HhqZ6mJJZM32lkyfGJikw0jg7v3S0oAvtvQQ==", + "dev": true, + "optional": true + }, + "esbuild-linux-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.13.15.tgz", + "integrity": "sha512-ZvTBPk0YWCLMCXiFmD5EUtB30zIPvC5Itxz0mdTu/xZBbbHJftQgLWY49wEPSn2T/TxahYCRDWun5smRa0Tu+g==", + "dev": true, + "optional": true + }, + "esbuild-linux-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.13.15.tgz", + "integrity": "sha512-eCKzkNSLywNeQTRBxJRQ0jxRCl2YWdMB3+PkWFo2BBQYC5mISLIVIjThNtn6HUNqua1pnvgP5xX0nHbZbPj5oA==", + "dev": true, + "optional": true + }, + "esbuild-linux-arm": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.13.15.tgz", + "integrity": "sha512-wUHttDi/ol0tD8ZgUMDH8Ef7IbDX+/UsWJOXaAyTdkT7Yy9ZBqPg8bgB/Dn3CZ9SBpNieozrPRHm0BGww7W/jA==", + "dev": true, + "optional": true + }, + "esbuild-linux-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.13.15.tgz", + "integrity": 
"sha512-bYpuUlN6qYU9slzr/ltyLTR9YTBS7qUDymO8SV7kjeNext61OdmqFAzuVZom+OLW1HPHseBfJ/JfdSlx8oTUoA==", + "dev": true, + "optional": true + }, + "esbuild-linux-mips64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.13.15.tgz", + "integrity": "sha512-KlVjIG828uFPyJkO/8gKwy9RbXhCEUeFsCGOJBepUlpa7G8/SeZgncUEz/tOOUJTcWMTmFMtdd3GElGyAtbSWg==", + "dev": true, + "optional": true + }, + "esbuild-linux-ppc64le": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.13.15.tgz", + "integrity": "sha512-h6gYF+OsaqEuBjeesTBtUPw0bmiDu7eAeuc2OEH9S6mV9/jPhPdhOWzdeshb0BskRZxPhxPOjqZ+/OqLcxQwEQ==", + "dev": true, + "optional": true + }, + "esbuild-netbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.13.15.tgz", + "integrity": "sha512-3+yE9emwoevLMyvu+iR3rsa+Xwhie7ZEHMGDQ6dkqP/ndFzRHkobHUKTe+NCApSqG5ce2z4rFu+NX/UHnxlh3w==", + "dev": true, + "optional": true + }, + "esbuild-openbsd-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.13.15.tgz", + "integrity": "sha512-wTfvtwYJYAFL1fSs8yHIdf5GEE4NkbtbXtjLWjM3Cw8mmQKqsg8kTiqJ9NJQe5NX/5Qlo7Xd9r1yKMMkHllp5g==", + "dev": true, + "optional": true + }, + "esbuild-sunos-64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.13.15.tgz", + "integrity": "sha512-lbivT9Bx3t1iWWrSnGyBP9ODriEvWDRiweAs69vI+miJoeKwHWOComSRukttbuzjZ8r1q0mQJ8Z7yUsDJ3hKdw==", + "dev": true, + "optional": true + }, + "esbuild-windows-32": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.13.15.tgz", + "integrity": "sha512-fDMEf2g3SsJ599MBr50cY5ve5lP1wyVwTe6aLJsM01KtxyKkB4UT+fc5MXQFn3RLrAIAZOG+tHC+yXObpSn7Nw==", + "dev": true, + "optional": true + }, + "esbuild-windows-64": { + "version": "0.13.15", + 
"resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.13.15.tgz", + "integrity": "sha512-9aMsPRGDWCd3bGjUIKG/ZOJPKsiztlxl/Q3C1XDswO6eNX/Jtwu4M+jb6YDH9hRSUflQWX0XKAfWzgy5Wk54JQ==", + "dev": true, + "optional": true + }, + "esbuild-windows-arm64": { + "version": "0.13.15", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.13.15.tgz", + "integrity": "sha512-zzvyCVVpbwQQATaf3IG8mu1IwGEiDxKkYUdA4FpoCHi1KtPa13jeScYDjlW0Qh+ebWzpKfR2ZwvqAQkSWNcKjA==", + "dev": true, + "optional": true + }, + "expect": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", + "integrity": "sha512-1M/0kAALIaj5LaG66sFJTbRsWTADnylly82cu4bspI0nl+pgP4E6Bh/aqdHlTUjul06K7xQnnrAoqfxVU0+/ag==", + "dev": true, + "requires": { + "@jest/types": "^27.4.2", + "jest-get-type": "^27.4.0", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "follow-redirects": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz", + "integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==" + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + 
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, + "get-port": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-3.2.0.tgz", + "integrity": "sha1-3Xzn3hh8Bsi/NTeWrHHgmfCYDrw=", + "dev": true, + "optional": true, + "peer": true + }, + "graceful-fs": { + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", + "dev": true + }, + "happy-dom": { + "version": "2.31.1", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-2.31.1.tgz", + "integrity": "sha512-hbTLxMqyluLT06nRN4TDGLjjKni73tZlvLdF6qGfdv5U4EnrSYSwcZK3ESmv0LEEa5St7NY7e62rhISotH8O3Q==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "he": "^1.1.1", + "node-fetch": "^2.6.1", + "sync-request": "^6.1.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0" + }, + "dependencies": { + "webidl-conversions": { + 
"version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", + "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", + "dev": true, + "optional": true, + "peer": true + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "optional": true, + "peer": true + }, + "http-basic": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/http-basic/-/http-basic-8.1.3.tgz", + "integrity": "sha512-/EcDMwJZh3mABI2NhGfHOGOeOZITqfkEO4p/xK+l3NpyncIHUQBoMvCSF/b5GqvKtySC2srL/GGG3+EtlqlmCw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "caseless": "^0.12.0", + "concat-stream": "^1.6.2", + "http-response-object": "^3.0.1", + "parse-cache-control": "^1.0.1" + } + }, + "http-response-object": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/http-response-object/-/http-response-object-3.0.2.tgz", + "integrity": 
"sha512-bqX0XTF6fnXSQcEJ2Iuyr75yVakyjIDCqroJQ/aHfSdlM743Cwqoi2nDYMzLGWUcuTWGWy8AAvOKXTfiv6q9RA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/node": "^10.0.3" + }, + "dependencies": { + "@types/node": { + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "optional": true, + "peer": true + }, + "is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true, + "optional": true, + "peer": true + }, + "jest-diff": { + "version": "27.4.6", + "resolved": 
"https://registry.npmjs.org/jest-diff/-/jest-diff-27.4.6.tgz", + "integrity": "sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "diff-sequences": "^27.4.0", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + } + }, + "jest-get-type": { + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.4.0.tgz", + "integrity": "sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==", + "dev": true + }, + "jest-matcher-utils": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.4.6.tgz", + "integrity": "sha512-XD4PKT3Wn1LQnRAq7ZsTI0VRuEc9OrCPFiO1XL7bftTGmfNF0DcEwMHRgqiu7NGf8ZoZDREpGrCniDkjt79WbA==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "jest-diff": "^27.4.6", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" + } + }, + "jest-message-util": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.4.6.tgz", + "integrity": "sha512-0p5szriFU0U74czRSFjH6RyS7UYIAkn/ntwMuOwTGWrQIOh5NzXXrq72LOqIkJKKvFbPq+byZKuBz78fjBERBA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.4.2", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.4", + "pretty-format": "^27.4.6", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "local-pkg": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.1.tgz", + "integrity": 
"sha512-lL87ytIGP2FU5PWwNDo0w3WhIo2gopIAxPg9RxDYF7m4rr5ahuZxP22xnJHIvaLTe4Z9P6uKKY2UHiwyB4pcrw==", + "dev": true + }, + "loupe": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.2.tgz", + "integrity": "sha512-QgVamnvj0jX1LMPlCAq0MK6hATORFtGqHoUKXTkwNe13BqlN6aePQCKnnTcFvdDYEEITcJ+gBl4mTW7YJtJbyQ==", + "dev": true, + "requires": { + "get-func-name": "^2.0.0" + } + }, + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + } + }, + "mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "dev": true, + "optional": true, + "peer": true + }, + "mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "mime-db": "1.51.0" + } + }, + "nanoid": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", + "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", + "dev": true + }, + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "object-inspect": { + "version": "1.12.0", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "dev": true, + "optional": true, + "peer": true + }, + "parse-cache-control": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-cache-control/-/parse-cache-control-1.0.1.tgz", + "integrity": "sha1-juqz5U+laSD+Fro493+iGqzC104=", + "dev": true, + "optional": true, + "peer": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "postcss": { + "version": "8.4.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.6.tgz", + "integrity": "sha512-OovjwIzs9Te46vlEx7+uXB0PLijpwjXGKXjVGGPIGubGpq7uh5Xgf6D6FiJ/SzJMBosHDp6a2hiXOS97iBXcaA==", + "dev": true, + "requires": { + "nanoid": "^3.2.0", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + }, + "pretty-format": { + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", + "integrity": 
"sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true, + "optional": true, + "peer": true + }, + "promise": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-8.1.0.tgz", + "integrity": "sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "asap": "~2.0.6" + } + }, + "qs": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "side-channel": "^1.0.4" + } + }, + "react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "optional": true, + "peer": true, + 
"requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "rollup": { + "version": "2.67.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.67.0.tgz", + "integrity": "sha512-W83AaERwvDiHwHEF/dfAfS3z1Be5wf7n+pO3ZAO5IQadCT2lBTr7WQ2MwZZe+nodbD+n3HtC4OCOAdsOPPcKZQ==", + "dev": true, + "requires": { + "fsevents": "~2.3.2" + } + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true, + "peer": true + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": 
"sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "dev": true + }, + "stack-utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", + "dev": true, + "requires": { + "escape-string-regexp": "^2.0.0" + }, + "dependencies": { + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true + } + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "safe-buffer": "~5.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "sync-request": { + "version": 
"6.1.0", + "resolved": "https://registry.npmjs.org/sync-request/-/sync-request-6.1.0.tgz", + "integrity": "sha512-8fjNkrNlNCrVc/av+Jn+xxqfCjYaBoHqCsDz6mt030UMxJGr+GSfCV1dQt2gRtlL63+VPidwDVLr7V2OcTSdRw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "http-response-object": "^3.0.1", + "sync-rpc": "^1.2.1", + "then-request": "^6.0.0" + } + }, + "sync-rpc": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/sync-rpc/-/sync-rpc-1.3.6.tgz", + "integrity": "sha512-J8jTXuZzRlvU7HemDgHi3pGnh/rkoqR/OZSjhTyyZrEkkYQbk7Z33AXp37mkPfPpfdOuj7Ex3H/TJM1z48uPQw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "get-port": "^3.1.0" + } + }, + "then-request": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/then-request/-/then-request-6.0.2.tgz", + "integrity": "sha512-3ZBiG7JvP3wbDzA9iNY5zJQcHL4jn/0BWtXIkagfz7QgOL/LqjCEOBQuJNZfu0XYnv5JhKh+cDxCPM4ILrqruA==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "@types/concat-stream": "^1.6.0", + "@types/form-data": "0.0.33", + "@types/node": "^8.0.0", + "@types/qs": "^6.2.31", + "caseless": "~0.12.0", + "concat-stream": "^1.6.0", + "form-data": "^2.2.0", + "http-basic": "^8.1.1", + "http-response-object": "^3.0.1", + "promise": "^8.0.0", + "qs": "^6.4.0" + }, + "dependencies": { + "@types/node": { + "version": "8.10.66", + "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.66.tgz", + "integrity": "sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==", + "dev": true, + "optional": true, + "peer": true + } + } + }, + "tinypool": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.1.1.tgz", + "integrity": "sha512-sW2fQZ2BRb/GX5v55NkHiTrbMLx0eX0xNpP+VGhOe2f7Oo04+LeClDyM19zCE/WCy7jJ8kzIJ0Ojrxj3UhN9Sg==", + "dev": true + }, + "tinyspy": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-0.2.10.tgz", + "integrity": 
"sha512-Qij6rGWCDjWIejxCXXVi6bNgvrYBp3PbqC4cBP/0fD6WHDOHCw09Zd13CsxrDqSR5PFq01WeqDws8t5lz5sH0A==", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true, + "optional": true, + "peer": true + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true, + "optional": true, + "peer": true + }, + "typescript": { + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.5.tgz", + "integrity": "sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==", + "dev": true + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true, + "optional": true, + "peer": true + }, + "vite": { + "version": "2.7.13", + "resolved": "https://registry.npmjs.org/vite/-/vite-2.7.13.tgz", + "integrity": "sha512-Mq8et7f3aK0SgSxjDNfOAimZGW9XryfHRa/uV0jseQSilg+KhYDSoNb9h1rknOy6SuMkvNDLKCYAYYUMCE+IgQ==", + "dev": true, + "requires": { + "esbuild": "^0.13.12", + "fsevents": "~2.3.2", + "postcss": "^8.4.5", + "resolve": "^1.20.0", + "rollup": "^2.59.0" + } + }, + "vitest": { + 
"version": "0.2.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.2.5.tgz", + "integrity": "sha512-QruEhsNxy8ycLxYG9rrGUfHZzJ8A6YvA9ULZ4w/ecvm0Zejm1nxUar/XkRWkL2xzrqA5AjmfqDSQZ8q2bFbA0Q==", + "dev": true, + "requires": { + "@types/chai": "^4.3.0", + "@types/chai-subset": "^1.3.3", + "chai": "^4.3.6", + "local-pkg": "^0.4.1", + "tinypool": "^0.1.1", + "tinyspy": "^0.2.10", + "vite": ">=2.7.13" + } + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true, + "optional": true, + "peer": true + }, + "whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "iconv-lite": "0.4.24" + } + }, + "whatwg-mimetype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true, + "optional": true, + "peer": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "optional": true, + "peer": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + } + } +} diff --git a/client/package.json b/client/package.json new file mode 100644 index 0000000..f0e428e --- /dev/null +++ b/client/package.json @@ -0,0 +1,17 @@ +{ + "devDependencies": { + "@types/expect": "^24.3.0", + "@types/mocha": "^9.1.0", + "@types/node": "^17.0.14", + "typescript": "^4.5.5", + "vitest": "^0.2.5" + }, + "scripts": { + "test:ci": 
"TEST_SHUTDOWN_API_SERVER=true vitest --run --config ./test/vitest.config.ts", + "test:local": "TEST_SHUTDOWN_API_SERVER=false vitest --run --config ./test/vitest.config.ts", + "test:watch": "TEST_SHUTDOWN_API_SERVER=false vitest --config ./test/vitest.config.ts" + }, + "dependencies": { + "axios": "^0.25.0" + } +} \ No newline at end of file diff --git a/client/test/base/base.test.ts b/client/test/base/base.test.ts new file mode 100644 index 0000000..c7bf610 --- /dev/null +++ b/client/test/base/base.test.ts @@ -0,0 +1,26 @@ +import { getClientV1 } from "../../client"; +import { describe, it, expect } from "vitest"; +import * as config from "../config"; + +const client = getClientV1(config.BASE_URL); + +describe("GET /api/status", function () { + it("server is available", async function (done) { + try { + const res = await client.status(); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + expect(res.data.item).toEqual({ + health: true, + versions: ["v1"], + title: "Go API Template", + message: "Welcome to the Go API Template Application!", + }); + + done(); + } catch (err) { + done(err); + } + }); +}); diff --git a/client/test/config.ts b/client/test/config.ts new file mode 100644 index 0000000..d8db927 --- /dev/null +++ b/client/test/config.ts @@ -0,0 +1,4 @@ +export const PORT = "7745"; +export const HOST = "http://127.0.0.1"; +export const BASE_URL = HOST + ":" + PORT; + diff --git a/client/test/setup.ts b/client/test/setup.ts new file mode 100644 index 0000000..2315637 --- /dev/null +++ b/client/test/setup.ts @@ -0,0 +1,20 @@ +import { exec } from "child_process"; +import * as config from "./config"; + +export const setup = () => { + console.log("Starting Client Tests"); + console.log({ + PORT: config.PORT, + HOST: config.HOST, + BASE_URL: config.BASE_URL, + }); +}; + +export const teardown = () => { + if (process.env.TEST_SHUTDOWN_API_SERVER === "true") { + const pc = exec("pkill -SIGTERM api"); // Kill background API process + 
pc.stdout.on("data", (data) => { + console.log(`stdout: ${data}`); + }); + } +}; diff --git a/client/test/v1/login.test.ts b/client/test/v1/login.test.ts new file mode 100644 index 0000000..3492aa8 --- /dev/null +++ b/client/test/v1/login.test.ts @@ -0,0 +1,75 @@ +import { getClientV1 } from "../../client"; +import { describe, it, expect } from "vitest"; +import * as config from "../config"; +import axios, { AxiosError } from "axios"; + +const client = getClientV1(config.BASE_URL); + +describe("POST /api/v1/login", function () { + it("user can login", async function (done) { + try { + const res = await client.login("admin@admin.com", "admin"); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + expect(res.data.expiresAt).exist; + expect(res.data.token).exist; + + done(); + } catch (err) { + done(err); + } + }); +}); + +describe("POST /api/v1/users/logout", function () { + it("user can logout", async function (done) { + try { + const myclient = getClientV1(config.BASE_URL); + + const res = await myclient.login("admin@admin.com", "admin"); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + const res2 = await myclient.logout(); + expect(res2.status).toBe(204); + expect(res2.statusText).toBe("No Content"); + + // Try to get self again + try { + const res3 = await myclient.self(); + expect(res3.status).toBe(401); + expect(res3.statusText).toBe("Unauthorized"); + } catch (e) { + if (axios.isAxiosError(e)) { + expect(e.response?.status).toBe(401); + return done(); + } else { + return done(e); + } + } + + done(); + } catch (err) { + done(err); + } + }); +}); + +describe("GET /api/v1/users/self", function () { + it("user can access basic self details", async function (done) { + try { + const res = await client.self(); + expect(res.status).toBe(200); + expect(res.statusText).toBe("OK"); + + expect(res.data.item.id).exist; + expect(res.data.item.name).toBe("Admin"); + expect(res.data.item.email).toBe("admin@admin.com"); + + done(); + } catch (err) 
{ + done(err); + } + }); +}); diff --git a/client/test/vitest.config.ts b/client/test/vitest.config.ts new file mode 100644 index 0000000..25f08e4 --- /dev/null +++ b/client/test/vitest.config.ts @@ -0,0 +1,8 @@ +/// <reference types="vitest" /> +import { defineConfig } from "vite"; + +export default defineConfig({ + test: { + globalSetup: "./test/setup.ts", + }, +}); diff --git a/client/tsconfig.json b/client/tsconfig.json new file mode 100644 index 0000000..249dc6d --- /dev/null +++ b/client/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "commonjs", + "outDir": "build", + "sourceMap": true, + "allowJs": true, + "checkJs": false, + "resolveJsonModule": true, + "skipLibCheck": true, + "strict": false, + "esModuleInterop": true, + "removeComments": true + }, + "include": ["client/**/*", "test/**/*"], + "exclude": ["node_modules", "**/*.spec.ts"] +}