Mirror of https://github.com/hay-kot/homebox.git (synced 2025-08-01 15:20:29 +00:00)

Merge branch 'pr/1'

Commit 0091192ac6
125 changed files with 1959 additions and 2284 deletions
.github/workflows/partial-backend.yaml (vendored, 2 changed lines)

@@ -7,7 +7,7 @@ jobs:
   Go:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Set up Go
         uses: actions/setup-go@v4
.github/workflows/partial-frontend.yaml (vendored, 10 changed lines)

@@ -9,11 +9,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         with:
           version: 6.0.2

@@ -34,7 +34,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -48,11 +48,11 @@ jobs:
         with:
           go-version: "1.20"

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
         with:
           node-version: 18

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         with:
           version: 6.0.2
.github/workflows/partial-publish.yaml (vendored, 6 changed lines)

@@ -20,7 +20,7 @@ jobs:
     name: "Publish Homebox"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Go
         uses: actions/setup-go@v4
         with:

@@ -28,14 +28,14 @@ jobs:

       - name: Set up QEMU
         id: qemu
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
         with:
           image: tonistiigi/binfmt:latest
           platforms: all

       - name: install buildx
         id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
         with:
           install: true
.github/workflows/publish.yaml (vendored, 2 changed lines)

@@ -13,7 +13,7 @@ jobs:
     name: "Deploy Nightly to Fly.io"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: superfly/flyctl-actions/setup-flyctl@master
       - run: flyctl deploy --remote-only
.github/workflows/tag.yaml (vendored, 6 changed lines)

@@ -22,7 +22,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          fetch-depth: 0

@@ -41,7 +41,7 @@ jobs:
           cp -r ./.output/public ../backend/app/api/static/

       - name: Run GoReleaser
-        uses: goreleaser/goreleaser-action@v4
+        uses: goreleaser/goreleaser-action@v5
        with:
          workdir: "backend"
          distribution: goreleaser

@@ -67,7 +67,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout main
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Deploy docs
         uses: mhausenblas/mkdocs-deploy-gh-pages@master
@@ -1,16 +1,16 @@
 # Contributing

-## We Develop with Github
+## We Develop with GitHub

-We use github to host code, to track issues and feature requests, as well as accept pull requests.
+We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.

 ## Branch Flow

-We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request you can use the following steps:
+We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request, you can use the following steps:

 1. Fork the repository and create a new branch from `main`.
 2. If you've added code that should be tested, add tests.
-3. If you've changed API's, update the documentation.
+3. If you've changed APIs, update the documentation.
 4. Ensure that the test suite and linters pass
 5. Issue your pull request

@@ -18,7 +18,7 @@ We use the `main` branch as the development branch. All PRs should be made to th

 ### Prerequisites

-There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you can need to ensure that you have the following tools installed:
+There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you need to ensure that you have the following tools installed:

 - [Go 1.19+](https://golang.org/doc/install)
 - [Swaggo](https://github.com/swaggo/swag)

@@ -31,27 +31,27 @@ If you're using `taskfile` you can run `task --list-all` for a list of all comma

 ### Setup

-If you're using the taskfile you can use the `task setup` command to run the required setup commands. Otherwise you can review the commands required in the `Taskfile.yml` file.
+If you're using the taskfile, you can use the `task setup` command to run the required setup commands. Otherwise, you can review the commands required in the `Taskfile.yml` file.

-Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag you will get an error when running the the frontend server.
+Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag, you will get an error when running the frontend server.

 ### API Development Notes

 start command `task go:run`

 1. API Server does not auto reload. You'll need to restart the server after making changes.
-2. Unit tests should be written in Go, however end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
+2. Unit tests should be written in Go, however, end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.

 ### Frontend Development Notes

 start command `task: ui:dev`

 1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
-2. We're using Vitest for our automated testing. you can run these with `task ui:watch`.
-3. Tests require the API server to be running and in some cases the first run will fail due to a race condition. If this happens just run the tests again and they should pass.
+2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
+3. Tests require the API server to be running, and in some cases the first run will fail due to a race condition. If this happens, just run the tests again and they should pass.

 ## Publishing Release

-Create a new tag in github with the version number vX.X.X. This will trigger a new release to be created.
+Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.

 Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo
Taskfile.yml (41 changed lines)

@@ -12,15 +12,25 @@ tasks:
       - cd backend && go mod tidy
       - cd frontend && pnpm install --shamefully-hoist

-  generate:
-    desc: |
-      Generates collateral files from the backend project
-      including swagger docs and typescripts type for the frontend
-    deps:
-      - db:generate
+  swag:
+    desc: Generate swagger docs
+    dir: backend/app/api/static/
+    vars:
+      API: "../"
+      INTERNAL: "../../../internal"
+      PKGS: "../../../pkgs"
+    cmds:
+      - swag fmt --dir={{ .API }}
+      - swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency
+    sources:
+      - "./backend/app/api/**/*"
+      - "./backend/internal/data/**"
+      - "./backend/internal/core/services/**/*"
+      - "./backend/app/tools/typegen/main.go"

   typescript-types:
     desc: Generates typescript types from swagger definition
     cmds:
-      - cd backend/app/api/static && swag fmt --dir=../
-      - cd backend/app/api/static && swag init --dir=../,../../../internal,../../../pkgs --parseDependency
       - |
         npx swagger-typescript-api \
           --no-client \

@@ -28,12 +38,17 @@ tasks:
           --path ./backend/app/api/static/docs/swagger.json \
           --output ./frontend/lib/api/types
       - go run ./backend/app/tools/typegen/main.go ./frontend/lib/api/types/data-contracts.ts
-      - cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
     sources:
-      - "./backend/app/api/**/*"
-      - "./backend/internal/data/**"
-      - "./backend/internal/core/services/**/*"
-      - "./backend/app/tools/typegen/main.go"
+      - ./backend/app/tools/typegen/main.go
+      - ./backend/app/api/static/docs/swagger.json

+  generate:
+    deps:
+      - db:generate
+    cmds:
+      - task: swag
+      - task: typescript-types
+      - cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json

   go:run:
     desc: Starts the backend api server (depends on generate task)
@@ -49,7 +49,14 @@ func WithRegistration(allowRegistration bool) func(*V1Controller) {
 	}
 }

+func WithSecureCookies(secure bool) func(*V1Controller) {
+	return func(ctrl *V1Controller) {
+		ctrl.cookieSecure = secure
+	}
+}
+
 type V1Controller struct {
+	cookieSecure  bool
 	repo          *repo.AllRepos
 	svc           *services.AllServices
 	maxUploadSize int64
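WithSecureCookies follows the same functional-option pattern as WithRegistration above: it returns a closure that mutates the controller. A minimal illustration of how such an option is applied (direct struct construction here is for demonstration only; the real code passes options to the controller constructor):

// illustration only: applying the new option by hand
func exampleSecureCookiesOption() {
	ctrl := &V1Controller{}
	WithSecureCookies(true)(ctrl)
	// ctrl.cookieSecure is now true, so the cookie helpers added in the auth
	// handler changes below will set the Secure flag on every cookie they emit
}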
@@ -68,3 +68,16 @@ func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc {
 func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc {
 	return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
 }
+
+// HandleSetPrimaryPhotos godoc
+//
+//	@Summary     Set Primary Photos
+//	@Description Sets the first photo of each item as the primary photo
+//	@Tags        Actions
+//	@Produce     json
+//	@Success     200 {object} ActionAmountResult
+//	@Router      /v1/actions/set-primary-photos [Post]
+//	@Security    Bearer
+func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
+	return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos)
+}
@@ -3,6 +3,7 @@ package v1
 import (
 	"errors"
 	"net/http"
+	"strconv"
 	"strings"
 	"time"

@@ -13,6 +14,12 @@ import (
 	"github.com/rs/zerolog/log"
 )

+const (
+	cookieNameToken    = "hb.auth.token"
+	cookieNameRemember = "hb.auth.remember"
+	cookieNameSession  = "hb.auth.session"
+)
+
 type (
 	TokenResponse struct {
 		Token string `json:"token"`

@@ -27,6 +34,30 @@ type (
 	}
 )

+type CookieContents struct {
+	Token     string
+	ExpiresAt time.Time
+	Remember  bool
+}
+
+func GetCookies(r *http.Request) (*CookieContents, error) {
+	cookie, err := r.Cookie(cookieNameToken)
+	if err != nil {
+		return nil, errors.New("authorization cookie is required")
+	}
+
+	rememberCookie, err := r.Cookie(cookieNameRemember)
+	if err != nil {
+		return nil, errors.New("remember cookie is required")
+	}
+
+	return &CookieContents{
+		Token:     cookie.Value,
+		ExpiresAt: cookie.Expires,
+		Remember:  rememberCookie.Value == "true",
+	}, nil
+}
+
 // HandleAuthLogin godoc
 //
 //	@Summary User Login

@@ -81,6 +112,7 @@ func (ctrl *V1Controller) HandleAuthLogin() errchain.HandlerFunc {
 			return validate.NewRequestError(errors.New("authentication failed"), http.StatusInternalServerError)
 		}

+		ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, loginForm.StayLoggedIn)
 		return server.JSON(w, http.StatusOK, TokenResponse{
 			Token:     "Bearer " + newToken.Raw,
 			ExpiresAt: newToken.ExpiresAt,

@@ -108,6 +140,7 @@ func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
 			return validate.NewRequestError(err, http.StatusInternalServerError)
 		}

+		ctrl.unsetCookies(w, noPort(r.Host))
 		return server.JSON(w, http.StatusNoContent, nil)
 	}
 }

@@ -133,6 +166,78 @@ func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
 			return validate.NewUnauthorizedError()
 		}

+		ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false)
 		return server.JSON(w, http.StatusOK, newToken)
 	}
 }
+
+func noPort(host string) string {
+	return strings.Split(host, ":")[0]
+}
+
+func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool) {
+	http.SetCookie(w, &http.Cookie{
+		Name:     cookieNameRemember,
+		Value:    strconv.FormatBool(remember),
+		Expires:  expires,
+		Domain:   domain,
+		Secure:   ctrl.cookieSecure,
+		HttpOnly: true,
+		Path:     "/",
+	})
+
+	// Set HTTP only cookie
+	http.SetCookie(w, &http.Cookie{
+		Name:     cookieNameToken,
+		Value:    token,
+		Expires:  expires,
+		Domain:   domain,
+		Secure:   ctrl.cookieSecure,
+		HttpOnly: true,
+		Path:     "/",
+	})
+
+	// Set Fake Session cookie
+	http.SetCookie(w, &http.Cookie{
+		Name:     cookieNameSession,
+		Value:    "true",
+		Expires:  expires,
+		Domain:   domain,
+		Secure:   ctrl.cookieSecure,
+		HttpOnly: false,
+		Path:     "/",
+	})
+}
+
+func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
+	http.SetCookie(w, &http.Cookie{
+		Name:     cookieNameToken,
+		Value:    "",
+		Expires:  time.Unix(0, 0),
+		Domain:   domain,
+		Secure:   ctrl.cookieSecure,
+		HttpOnly: true,
+		Path:     "/",
+	})
+
+	http.SetCookie(w, &http.Cookie{
+		Name:     cookieNameRemember,
+		Value:    "false",
+		Expires:  time.Unix(0, 0),
+		Domain:   domain,
+		Secure:   ctrl.cookieSecure,
+		HttpOnly: true,
+		Path:     "/",
+	})
+
+	// Set Fake Session cookie
+	http.SetCookie(w, &http.Cookie{
+		Name:     cookieNameSession,
+		Value:    "false",
+		Expires:  time.Unix(0, 0),
+		Domain:   domain,
+		Secure:   ctrl.cookieSecure,
+		HttpOnly: false,
+		Path:     "/",
+	})
+}
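The new GetCookies helper and the hb.auth.* cookie names above are what the middleware change later in this diff relies on. A small, hypothetical usage sketch (test-style code, assuming net/http/httptest is imported; not part of the commit):

// sketch: build a request the way a browser would send it after login and
// read it back with the new helper
func exampleGetCookies() {
	r := httptest.NewRequest(http.MethodGet, "/api/v1/items", nil)
	r.AddCookie(&http.Cookie{Name: cookieNameToken, Value: "raw-token-value"})
	r.AddCookie(&http.Cookie{Name: cookieNameRemember, Value: "true"})

	contents, err := GetCookies(r)
	if err != nil {
		panic(err) // both hb.auth.token and hb.auth.remember must be present
	}
	_ = contents // contents.Token == "raw-token-value", contents.Remember == true
}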
@@ -27,6 +27,7 @@ import (
 //	@Param   pageSize  query int      false "items per page"
 //	@Param   labels    query []string false "label Ids" collectionFormat(multi)
 //	@Param   locations query []string false "location Ids" collectionFormat(multi)
+//	@Param   parentIds query []string false "parent Ids" collectionFormat(multi)
 //	@Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
 //	@Router  /v1/items [GET]
 //	@Security Bearer

@@ -56,6 +57,7 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
 			Search:          params.Get("q"),
 			LocationIDs:     queryUUIDList(params, "locations"),
 			LabelIDs:        queryUUIDList(params, "labels"),
+			ParentItemIDs:   queryUUIDList(params, "parentIds"),
 			IncludeArchived: queryBool(params.Get("includeArchived")),
 			Fields:          filterFieldItems(params["fields"]),
 			OrderBy:         params.Get("orderBy"),

@@ -167,7 +169,6 @@ func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
 	return adapters.ActionID("id", fn, http.StatusOK)
 }

-
 // HandleItemPatch godocs
 //
 //	@Summary Update Item

@@ -183,12 +184,12 @@ func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
 		auth := services.NewContext(r.Context())

 		body.ID = ID
-		err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
-		if err != nil {
-			return repo.ItemOut{}, err
-		}
+		err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
+		if err != nil {
+			return repo.ItemOut{}, err
+		}

-		return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
+		return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
 	}

 	return adapters.ActionID("id", fn, http.StatusOK)

@@ -231,7 +232,7 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
 		return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field)
 	}

-	return adapters.Action(fn, http.StatusOK)
+	return adapters.Query(fn, http.StatusOK)
 }
@@ -3,6 +3,7 @@ package v1
 import (
 	"errors"
 	"net/http"
+	"path/filepath"

 	"github.com/hay-kot/homebox/backend/internal/core/services"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"

@@ -67,7 +68,15 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {

 		attachmentType := r.FormValue("type")
 		if attachmentType == "" {
-			attachmentType = attachment.TypeAttachment.String()
+			// Attempt to auto-detect the type of the file
+			ext := filepath.Ext(attachmentName)
+
+			switch ext {
+			case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
+				attachmentType = attachment.TypePhoto.String()
+			default:
+				attachmentType = attachment.TypeAttachment.String()
+			}
 		}

 		id, err := ctrl.routeID(r)
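The auto-detection above keys purely off the uploaded file name's extension, and filepath.Ext preserves case, so only lower-case extensions match the photo list. A hypothetical standalone mirror of the same decision (illustration, not part of the commit):

// hypothetical helper mirroring the switch above; "IMG_1.JPG" falls through to
// the default branch because ".JPG" != ".jpg"
func detectAttachmentType(name string) string {
	switch filepath.Ext(name) {
	case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
		return attachment.TypePhoto.String()
	default:
		return attachment.TypeAttachment.String()
	}
}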
@@ -15,6 +15,14 @@ import (
 	"image/png"
 	"io"
 	"net/http"
+	"net/url"
+
+	"github.com/hay-kot/homebox/backend/internal/web/adapters"
+	"github.com/hay-kot/httpkit/errchain"
+	"github.com/yeqown/go-qrcode/v2"
+	"github.com/yeqown/go-qrcode/writer/standard"
+
+	_ "embed"
 )

 //go:embed assets/QRIcon.png

@@ -69,7 +77,12 @@ func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
 			panic(err)
 		}

-		qrc, err := qrcode.New(q.Data)
+		decodedStr, err := url.QueryUnescape(q.Data)
+		if err != nil {
+			return err
+		}
+
+		qrc, err := qrcode.New(decodedStr)
 		if err != nil {
 			return err
 		}
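The change above URL-decodes the data query value before it is rendered, so percent-encoded URLs come out as readable QR payloads. A tiny standard-library sketch of what the added call does (example values only):

// sketch: effect of url.QueryUnescape on a percent-encoded QR payload
func exampleUnescape() {
	raw := "https%3A%2F%2Fhomebox.example%2Fitem%2F123"
	decoded, err := url.QueryUnescape(raw)
	if err != nil {
		// malformed escapes such as "%zz" are rejected here; the handler above
		// now returns that error instead of encoding the garbled string
		panic(err)
	}
	fmt.Println(decoded) // https://homebox.example/item/123
}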
@@ -35,6 +35,15 @@ var (
 	buildTime = "now"
 )

+func build() string {
+	short := commit
+	if len(short) > 7 {
+		short = short[:7]
+	}
+
+	return fmt.Sprintf("%s, commit %s, built at %s", version, short, buildTime)
+}
+
 // @title Homebox API
 // @version 1.0
 // @description Track, Manage, and Organize your Things.

@@ -47,7 +56,7 @@ var (
 func main() {
 	zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack

-	cfg, err := config.New()
+	cfg, err := config.New(build(), "Homebox inventory management system")
 	if err != nil {
 		panic(err)
 	}

@@ -146,6 +155,9 @@ func run(cfg *config.Config) error {
 	app.server = server.NewServer(
 		server.WithHost(app.conf.Web.Host),
 		server.WithPort(app.conf.Web.Port),
+		server.WithReadTimeout(app.conf.Web.ReadTimeout),
+		server.WithWriteTimeout(app.conf.Web.WriteTimeout),
+		server.WithIdleTimeout(app.conf.Web.IdleTimeout),
 	)
 	log.Info().Msgf("Starting HTTP Server on %s:%s", app.server.Host, app.server.Port)
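The new build() helper trims a full commit SHA to seven characters and folds it into the description passed to config.New. A worked example with hypothetical ldflags-injected values (purely illustrative, not part of the commit):

// sketch: what build() produces for example values of version/commit/buildTime
func exampleBuildString() {
	version := "v0.10.0"
	commit := "0091192ac6000000000000000000000000000000" // example full SHA
	buildTime := "2023-11-20T00:00:00Z"

	short := commit
	if len(short) > 7 {
		short = short[:7]
	}

	fmt.Printf("%s, commit %s, built at %s\n", version, short, buildTime)
	// Output: v0.10.0, commit 0091192, built at 2023-11-20T00:00:00Z
}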
@@ -7,6 +7,7 @@ import (
 	"net/url"
 	"strings"

+	v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
 	"github.com/hay-kot/homebox/backend/internal/core/services"
 	"github.com/hay-kot/homebox/backend/internal/sys/validate"
 	"github.com/hay-kot/httpkit/errchain"

@@ -94,20 +95,6 @@ func getQuery(r *http.Request) (string, error) {
 	return token, nil
 }

-func getCookie(r *http.Request) (string, error) {
-	cookie, err := r.Cookie("hb.auth.token")
-	if err != nil {
-		return "", errors.New("access_token cookie is required")
-	}
-
-	token, err := url.QueryUnescape(cookie.Value)
-	if err != nil {
-		return "", errors.New("access_token cookie is required")
-	}
-
-	return token, nil
-}
-
 // mwAuthToken is a middleware that will check the database for a stateful token
 // and attach it's user to the request context, or return an appropriate error.
 // Authorization support is by token via Headers or Query Parameter

@@ -115,21 +102,30 @@ func getCookie(r *http.Request) (string, error) {
 // Example:
 // - header = "Bearer 1234567890"
 // - query = "?access_token=1234567890"
+// - cookie = hb.auth.token = 1234567890
 func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
 	return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
-		keyFuncs := [...]KeyFunc{
-			getBearer,
-			getCookie,
-			getQuery,
+		var requestToken string
+
+		// We ignore the error to allow the next strategy to be attempted
+		{
+			cookies, _ := v1.GetCookies(r)
+			if cookies != nil {
+				requestToken = cookies.Token
+			}
 		}

-		var requestToken string
-		for _, keyFunc := range keyFuncs {
-			token, err := keyFunc(r)
-			if err == nil {
-				requestToken = token
-				break
+		if requestToken == "" {
+			keyFuncs := [...]KeyFunc{
+				getBearer,
+				getQuery,
+			}
+
+			for _, keyFunc := range keyFuncs {
+				token, err := keyFunc(r)
+				if err == nil {
+					requestToken = token
+					break
+				}
 			}
 		}

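After this change the middleware prefers the new auth cookie and only falls back to the Authorization header or the access_token query parameter when no cookie token was found. A condensed, hypothetical sketch of that lookup order using only net/http (the real code goes through v1.GetCookies and the KeyFunc list above):

// sketch of the resolution order: cookie first, then bearer header, then query string
func resolveRequestToken(r *http.Request) string {
	if c, err := r.Cookie("hb.auth.token"); err == nil && c.Value != "" {
		return c.Value
	}
	if h := r.Header.Get("Authorization"); h != "" {
		return strings.TrimPrefix(h, "Bearer ")
	}
	return r.URL.Query().Get("access_token")
}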
@@ -92,6 +92,7 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
 	r.Post(v1Base("/actions/ensure-asset-ids"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
 	r.Post(v1Base("/actions/zero-item-time-fields"), chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
 	r.Post(v1Base("/actions/ensure-import-refs"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
+	r.Post(v1Base("/actions/set-primary-photos"), chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))

 	r.Get(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
 	r.Post(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
|
@ -1,5 +1,4 @@
|
|||
// Package docs GENERATED BY SWAG; DO NOT EDIT
|
||||
// This file was generated by swaggo/swag
|
||||
// Package docs Code generated by swaggo/swag. DO NOT EDIT
|
||||
package docs
|
||||
|
||||
import "github.com/swaggo/swag"
|
||||
|
@ -68,6 +67,31 @@ const docTemplate = `{
|
|||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
|
@ -1879,6 +1903,9 @@ const docTemplate = `{
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -1890,6 +1917,9 @@ const docTemplate = `{
|
|||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -1989,6 +2019,9 @@ const docTemplate = `{
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
@ -2004,9 +2037,13 @@ const docTemplate = `{
|
|||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
|
@ -2022,9 +2059,13 @@ const docTemplate = `{
|
|||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
|
@ -2096,6 +2137,9 @@ const docTemplate = `{
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
@ -2107,9 +2151,13 @@ const docTemplate = `{
|
|||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
|
@ -2133,7 +2181,8 @@ const docTemplate = `{
|
|||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
|
@ -2218,7 +2267,6 @@ const docTemplate = `{
|
|||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2255,12 +2303,6 @@ const docTemplate = `{
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -2322,12 +2364,6 @@ const docTemplate = `{
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -2404,7 +2440,6 @@ const docTemplate = `{
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2421,7 +2456,6 @@ const docTemplate = `{
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2433,7 +2467,6 @@ const docTemplate = `{
|
|||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2447,7 +2480,6 @@ const docTemplate = `{
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2456,7 +2488,6 @@ const docTemplate = `{
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2470,7 +2501,6 @@ const docTemplate = `{
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2872,6 +2902,8 @@ var SwaggerInfo = &swag.Spec{
|
|||
Description: "Track, Manage, and Organize your Things.",
|
||||
InfoInstanceName: "swagger",
|
||||
SwaggerTemplate: docTemplate,
|
||||
LeftDelim: "{{",
|
||||
RightDelim: "}}",
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
|
|
@ -60,6 +60,31 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
|
@ -1871,6 +1896,9 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -1882,6 +1910,9 @@
|
|||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -1981,6 +2012,9 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
@ -1996,9 +2030,13 @@
|
|||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
|
@ -2014,9 +2052,13 @@
|
|||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
|
@ -2088,6 +2130,9 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
@ -2099,9 +2144,13 @@
|
|||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
|
@ -2125,7 +2174,8 @@
|
|||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
|
@ -2210,7 +2260,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2247,12 +2296,6 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -2314,12 +2357,6 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -2396,7 +2433,6 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2413,7 +2449,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2425,7 +2460,6 @@
|
|||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2439,7 +2473,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2448,7 +2481,6 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2462,7 +2494,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -52,6 +52,8 @@ definitions:
|
|||
$ref: '#/definitions/repo.DocumentOut'
|
||||
id:
|
||||
type: string
|
||||
primary:
|
||||
type: boolean
|
||||
type:
|
||||
type: string
|
||||
updatedAt:
|
||||
|
@ -59,6 +61,8 @@ definitions:
|
|||
type: object
|
||||
repo.ItemAttachmentUpdate:
|
||||
properties:
|
||||
primary:
|
||||
type: boolean
|
||||
title:
|
||||
type: string
|
||||
type:
|
||||
|
@ -126,6 +130,8 @@ definitions:
|
|||
type: array
|
||||
id:
|
||||
type: string
|
||||
imageId:
|
||||
type: string
|
||||
insured:
|
||||
type: boolean
|
||||
labels:
|
||||
|
@ -136,7 +142,8 @@ definitions:
|
|||
description: Warranty
|
||||
type: boolean
|
||||
location:
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.LocationSummary'
|
||||
description: Edges
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
|
@ -150,7 +157,8 @@ definitions:
|
|||
description: Extras
|
||||
type: string
|
||||
parent:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.ItemSummary'
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseFrom:
|
||||
|
@ -201,6 +209,8 @@ definitions:
|
|||
type: string
|
||||
id:
|
||||
type: string
|
||||
imageId:
|
||||
type: string
|
||||
insured:
|
||||
type: boolean
|
||||
labels:
|
||||
|
@ -208,7 +218,8 @@ definitions:
|
|||
$ref: '#/definitions/repo.LabelSummary'
|
||||
type: array
|
||||
location:
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.LocationSummary'
|
||||
description: Edges
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
|
@ -227,6 +238,7 @@ definitions:
|
|||
archived:
|
||||
type: boolean
|
||||
assetId:
|
||||
example: "0"
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
|
@ -287,7 +299,6 @@ definitions:
|
|||
warrantyDetails:
|
||||
type: string
|
||||
warrantyExpires:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.LabelCreate:
|
||||
|
@ -312,10 +323,6 @@ definitions:
|
|||
type: string
|
||||
id:
|
||||
type: string
|
||||
items:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
updatedAt:
|
||||
|
@ -356,10 +363,6 @@ definitions:
|
|||
type: string
|
||||
id:
|
||||
type: string
|
||||
items:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
parent:
|
||||
|
@ -410,7 +413,6 @@ definitions:
|
|||
repo.MaintenanceEntry:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
|
@ -422,13 +424,11 @@ definitions:
|
|||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceEntryCreate:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
|
@ -438,7 +438,6 @@ definitions:
|
|||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
|
@ -446,7 +445,6 @@ definitions:
|
|||
repo.MaintenanceEntryUpdate:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
|
@ -456,7 +454,6 @@ definitions:
|
|||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceLog:
|
||||
|
@ -742,6 +739,21 @@ paths:
|
|||
summary: Ensures Import Refs
|
||||
tags:
|
||||
- Actions
|
||||
/v1/actions/set-primary-photos:
|
||||
post:
|
||||
description: Sets the first photo of each item as the primary photo
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/v1.ActionAmountResult'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Set Primary Photos
|
||||
tags:
|
||||
- Actions
|
||||
/v1/actions/zero-item-time-fields:
|
||||
post:
|
||||
description: Resets all item date fields to the beginning of the day
|
||||
|
|
|
@ -3,38 +3,38 @@ module github.com/hay-kot/homebox/backend
|
|||
go 1.20
|
||||
|
||||
require (
|
||||
ariga.io/atlas v0.12.0
|
||||
entgo.io/ent v0.12.3
|
||||
github.com/ardanlabs/conf/v3 v3.1.6
|
||||
github.com/containrrr/shoutrrr v0.7.1
|
||||
ariga.io/atlas v0.15.0
|
||||
entgo.io/ent v0.12.5
|
||||
github.com/ardanlabs/conf/v3 v3.1.7
|
||||
github.com/containrrr/shoutrrr v0.8.0
|
||||
github.com/go-chi/chi/v5 v5.0.10
|
||||
github.com/go-playground/validator/v10 v10.14.1
|
||||
github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/gorilla/schema v1.2.0
|
||||
github.com/hay-kot/httpkit v0.0.3
|
||||
github.com/mattn/go-sqlite3 v1.14.17
|
||||
github.com/go-playground/validator/v10 v10.16.0
|
||||
github.com/gocarina/gocsv v0.0.0-20231116093920-b87c2d0e983a
|
||||
github.com/google/uuid v1.4.0
|
||||
github.com/gorilla/schema v1.2.1
|
||||
github.com/hay-kot/httpkit v0.0.5
|
||||
github.com/mattn/go-sqlite3 v1.14.18
|
||||
github.com/olahol/melody v1.1.4
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/rs/zerolog v1.29.1
|
||||
github.com/rs/zerolog v1.31.0
|
||||
github.com/stretchr/testify v1.8.4
|
||||
github.com/swaggo/http-swagger v1.3.4
|
||||
github.com/swaggo/swag v1.16.1
|
||||
github.com/swaggo/swag v1.16.2
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.2
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.1
|
||||
golang.org/x/crypto v0.11.0
|
||||
modernc.org/sqlite v1.24.0
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.2
|
||||
golang.org/x/crypto v0.16.0
|
||||
modernc.org/sqlite v1.27.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/KyleBanks/depth v1.2.1 // indirect
|
||||
github.com/agext/levenshtein v1.2.3 // indirect
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/fatih/color v1.15.0 // indirect
|
||||
github.com/fatih/color v1.16.0 // indirect
|
||||
github.com/fogleman/gg v1.3.0 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
||||
github.com/go-openapi/inflect v0.19.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.20.0 // indirect
|
||||
github.com/go-openapi/jsonreference v0.20.2 // indirect
|
||||
|
@ -43,35 +43,36 @@ require (
|
|||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
|
||||
github.com/google/go-cmp v0.5.9 // indirect
|
||||
github.com/gorilla/websocket v1.5.0 // indirect
|
||||
github.com/hashicorp/hcl/v2 v2.17.0 // indirect
|
||||
github.com/google/go-cmp v0.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.1 // indirect
|
||||
github.com/hashicorp/hcl/v2 v2.19.1 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||
github.com/leodido/go-urn v1.2.4 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.19 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/rogpeppe/go-internal v1.11.0 // indirect
|
||||
github.com/swaggo/files v1.0.1 // indirect
|
||||
github.com/yeqown/reedsolomon v1.0.0 // indirect
|
||||
github.com/zclconf/go-cty v1.13.2 // indirect
|
||||
golang.org/x/image v0.9.0 // indirect
|
||||
golang.org/x/mod v0.12.0 // indirect
|
||||
golang.org/x/net v0.12.0 // indirect
|
||||
golang.org/x/sys v0.10.0 // indirect
|
||||
golang.org/x/text v0.11.0 // indirect
|
||||
golang.org/x/tools v0.11.0 // indirect
|
||||
github.com/zclconf/go-cty v1.14.1 // indirect
|
||||
golang.org/x/image v0.14.0 // indirect
|
||||
golang.org/x/mod v0.14.0 // indirect
|
||||
golang.org/x/net v0.18.0 // indirect
|
||||
golang.org/x/sys v0.15.0 // indirect
|
||||
golang.org/x/text v0.14.0 // indirect
|
||||
golang.org/x/tools v0.15.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
lukechampine.com/uint128 v1.3.0 // indirect
|
||||
modernc.org/cc/v3 v3.41.0 // indirect
|
||||
modernc.org/ccgo/v3 v3.16.14 // indirect
|
||||
modernc.org/libc v1.24.1 // indirect
|
||||
modernc.org/ccgo/v3 v3.16.15 // indirect
|
||||
modernc.org/libc v1.34.4 // indirect
|
||||
modernc.org/mathutil v1.6.0 // indirect
|
||||
modernc.org/memory v1.6.0 // indirect
|
||||
modernc.org/memory v1.7.2 // indirect
|
||||
modernc.org/opt v0.1.3 // indirect
|
||||
modernc.org/strutil v1.1.3 // indirect
|
||||
modernc.org/strutil v1.2.0 // indirect
|
||||
modernc.org/token v1.1.0 // indirect
|
||||
)
|
||||
|
|
backend/go.sum (1215 changed lines; diff suppressed because it is too large)
@@ -38,7 +38,7 @@ func New() *EventBus {
 		subscribers: map[Event][]func(any){
 			EventLabelMutation:    {},
 			EventLocationMutation: {},
-			EventItemMutation: {},
+			EventItemMutation:     {},
 		},
 	}
 }
@@ -23,7 +23,7 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentId uuid.UU

 func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
 	// Update Attachment
-	attachment, err := svc.repo.Attachments.Update(ctx, data.ID, attachment.Type(data.Type))
+	attachment, err := svc.repo.Attachments.Update(ctx, data.ID, data)
 	if err != nil {
 		return repo.ItemOut{}, err
 	}
@@ -26,6 +26,8 @@ type Attachment struct {
 	UpdatedAt time.Time `json:"updated_at,omitempty"`
 	// Type holds the value of the "type" field.
 	Type attachment.Type `json:"type,omitempty"`
+	// Primary holds the value of the "primary" field.
+	Primary bool `json:"primary,omitempty"`
 	// Edges holds the relations/edges for other nodes in the graph.
 	// The values are being populated by the AttachmentQuery when eager-loading is set.
 	Edges AttachmentEdges `json:"edges"`

@@ -76,6 +78,8 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
 	values := make([]any, len(columns))
 	for i := range columns {
 		switch columns[i] {
+		case attachment.FieldPrimary:
+			values[i] = new(sql.NullBool)
 		case attachment.FieldType:
 			values[i] = new(sql.NullString)
 		case attachment.FieldCreatedAt, attachment.FieldUpdatedAt:

@@ -125,6 +129,12 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
 			} else if value.Valid {
 				a.Type = attachment.Type(value.String)
 			}
+		case attachment.FieldPrimary:
+			if value, ok := values[i].(*sql.NullBool); !ok {
+				return fmt.Errorf("unexpected type %T for field primary", values[i])
+			} else if value.Valid {
+				a.Primary = value.Bool
+			}
 		case attachment.ForeignKeys[0]:
 			if value, ok := values[i].(*sql.NullScanner); !ok {
 				return fmt.Errorf("unexpected type %T for field document_attachments", values[i])

@@ -193,6 +203,9 @@ func (a *Attachment) String() string {
 	builder.WriteString(", ")
 	builder.WriteString("type=")
 	builder.WriteString(fmt.Sprintf("%v", a.Type))
+	builder.WriteString(", ")
+	builder.WriteString("primary=")
+	builder.WriteString(fmt.Sprintf("%v", a.Primary))
 	builder.WriteByte(')')
 	return builder.String()
 }
@ -22,6 +22,8 @@ const (
|
|||
FieldUpdatedAt = "updated_at"
|
||||
// FieldType holds the string denoting the type field in the database.
|
||||
FieldType = "type"
|
||||
// FieldPrimary holds the string denoting the primary field in the database.
|
||||
FieldPrimary = "primary"
|
||||
// EdgeItem holds the string denoting the item edge name in mutations.
|
||||
EdgeItem = "item"
|
||||
// EdgeDocument holds the string denoting the document edge name in mutations.
|
||||
|
@ -50,6 +52,7 @@ var Columns = []string{
|
|||
FieldCreatedAt,
|
||||
FieldUpdatedAt,
|
||||
FieldType,
|
||||
FieldPrimary,
|
||||
}
|
||||
|
||||
// ForeignKeys holds the SQL foreign-keys that are owned by the "attachments"
|
||||
|
@ -81,6 +84,8 @@ var (
|
|||
DefaultUpdatedAt func() time.Time
|
||||
// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
|
||||
UpdateDefaultUpdatedAt func() time.Time
|
||||
// DefaultPrimary holds the default value on creation for the "primary" field.
|
||||
DefaultPrimary bool
|
||||
// DefaultID holds the default value on creation for the "id" field.
|
||||
DefaultID func() uuid.UUID
|
||||
)
|
||||
|
@ -137,6 +142,11 @@ func ByType(opts ...sql.OrderTermOption) OrderOption {
|
|||
return sql.OrderByField(FieldType, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByPrimary orders the results by the primary field.
|
||||
func ByPrimary(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldPrimary, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByItemField orders the results by item field.
|
||||
func ByItemField(field string, opts ...sql.OrderTermOption) OrderOption {
|
||||
return func(s *sql.Selector) {
|
||||
|
|
@@ -66,6 +66,11 @@ func UpdatedAt(v time.Time) predicate.Attachment {
 	return predicate.Attachment(sql.FieldEQ(FieldUpdatedAt, v))
 }

+// Primary applies equality check predicate on the "primary" field. It's identical to PrimaryEQ.
+func Primary(v bool) predicate.Attachment {
+	return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
+}
+
 // CreatedAtEQ applies the EQ predicate on the "created_at" field.
 func CreatedAtEQ(v time.Time) predicate.Attachment {
 	return predicate.Attachment(sql.FieldEQ(FieldCreatedAt, v))

@@ -166,6 +171,16 @@ func TypeNotIn(vs ...Type) predicate.Attachment {
 	return predicate.Attachment(sql.FieldNotIn(FieldType, vs...))
 }

+// PrimaryEQ applies the EQ predicate on the "primary" field.
+func PrimaryEQ(v bool) predicate.Attachment {
+	return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
+}
+
+// PrimaryNEQ applies the NEQ predicate on the "primary" field.
+func PrimaryNEQ(v bool) predicate.Attachment {
+	return predicate.Attachment(sql.FieldNEQ(FieldPrimary, v))
+}
+
 // HasItem applies the HasEdge predicate on the "item" edge.
 func HasItem() predicate.Attachment {
 	return predicate.Attachment(func(s *sql.Selector) {

@@ -214,32 +229,15 @@ func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {

 // And groups predicates with the AND operator between them.
 func And(predicates ...predicate.Attachment) predicate.Attachment {
-	return predicate.Attachment(func(s *sql.Selector) {
-		s1 := s.Clone().SetP(nil)
-		for _, p := range predicates {
-			p(s1)
-		}
-		s.Where(s1.P())
-	})
+	return predicate.Attachment(sql.AndPredicates(predicates...))
 }

 // Or groups predicates with the OR operator between them.
 func Or(predicates ...predicate.Attachment) predicate.Attachment {
-	return predicate.Attachment(func(s *sql.Selector) {
-		s1 := s.Clone().SetP(nil)
-		for i, p := range predicates {
-			if i > 0 {
-				s1.Or()
-			}
-			p(s1)
-		}
-		s.Where(s1.P())
-	})
+	return predicate.Attachment(sql.OrPredicates(predicates...))
 }

 // Not applies the not operator on the given predicate.
 func Not(p predicate.Attachment) predicate.Attachment {
-	return predicate.Attachment(func(s *sql.Selector) {
-		p(s.Not())
-	})
+	return predicate.Attachment(sql.NotPredicates(p))
 }
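The Primary and PrimaryEQ predicates generated above are what repository code can use to find or flip primary photos. A hedged sketch of standard ent usage with the generated client (the concrete repo call sites are not part of this diff):

// sketch: list every attachment currently flagged as primary; assumes the
// generated ent client and attachment package from this repository
func primaryAttachments(ctx context.Context, client *ent.Client) ([]*ent.Attachment, error) {
	return client.Attachment.Query().
		Where(attachment.Primary(true)).
		All(ctx)
}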
@@ -65,6 +65,20 @@ func (ac *AttachmentCreate) SetNillableType(a *attachment.Type) *AttachmentCreat
 	return ac
 }

+// SetPrimary sets the "primary" field.
+func (ac *AttachmentCreate) SetPrimary(b bool) *AttachmentCreate {
+	ac.mutation.SetPrimary(b)
+	return ac
+}
+
+// SetNillablePrimary sets the "primary" field if the given value is not nil.
+func (ac *AttachmentCreate) SetNillablePrimary(b *bool) *AttachmentCreate {
+	if b != nil {
+		ac.SetPrimary(*b)
+	}
+	return ac
+}
+
 // SetID sets the "id" field.
 func (ac *AttachmentCreate) SetID(u uuid.UUID) *AttachmentCreate {
 	ac.mutation.SetID(u)

@@ -148,6 +162,10 @@ func (ac *AttachmentCreate) defaults() {
 		v := attachment.DefaultType
 		ac.mutation.SetType(v)
 	}
+	if _, ok := ac.mutation.Primary(); !ok {
+		v := attachment.DefaultPrimary
+		ac.mutation.SetPrimary(v)
+	}
 	if _, ok := ac.mutation.ID(); !ok {
 		v := attachment.DefaultID()
 		ac.mutation.SetID(v)

@@ -170,6 +188,9 @@ func (ac *AttachmentCreate) check() error {
 			return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
 		}
 	}
+	if _, ok := ac.mutation.Primary(); !ok {
+		return &ValidationError{Name: "primary", err: errors.New(`ent: missing required field "Attachment.primary"`)}
+	}
 	if _, ok := ac.mutation.ItemID(); !ok {
 		return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
 	}

@@ -223,6 +244,10 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
 		_spec.SetField(attachment.FieldType, field.TypeEnum, value)
 		_node.Type = value
 	}
+	if value, ok := ac.mutation.Primary(); ok {
+		_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
+		_node.Primary = value
+	}
 	if nodes := ac.mutation.ItemIDs(); len(nodes) > 0 {
 		edge := &sqlgraph.EdgeSpec{
 			Rel: sqlgraph.M2O,

@@ -263,11 +288,15 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
 // AttachmentCreateBulk is the builder for creating many Attachment entities in bulk.
 type AttachmentCreateBulk struct {
 	config
+	err      error
 	builders []*AttachmentCreate
 }

 // Save creates the Attachment entities in the database.
 func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error) {
+	if acb.err != nil {
+		return nil, acb.err
+	}
 	specs := make([]*sqlgraph.CreateSpec, len(acb.builders))
 	nodes := make([]*Attachment, len(acb.builders))
 	mutators := make([]Mutator, len(acb.builders))
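The create builder now exposes SetPrimary/SetNillablePrimary, and check() treats the field as required (defaults() backfills it from attachment.DefaultPrimary). A hedged sketch of creating an attachment marked as primary; the edge setter names are the usual ent-generated ones and are assumptions here, since they fall outside this hunk:

// sketch: create an attachment and flag it as the primary photo; SetItemID and
// SetDocumentID are assumed generated edge setters, abbreviated for brevity
func createPrimaryAttachment(ctx context.Context, client *ent.Client, itemID, docID uuid.UUID) (*ent.Attachment, error) {
	return client.Attachment.Create().
		SetItemID(itemID).
		SetDocumentID(docID).
		SetType(attachment.TypePhoto).
		SetPrimary(true).
		Save(ctx)
}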
@@ -51,6 +51,20 @@ func (au *AttachmentUpdate) SetNillableType(a *attachment.Type) *AttachmentUpdat
 	return au
 }

+// SetPrimary sets the "primary" field.
+func (au *AttachmentUpdate) SetPrimary(b bool) *AttachmentUpdate {
+	au.mutation.SetPrimary(b)
+	return au
+}
+
+// SetNillablePrimary sets the "primary" field if the given value is not nil.
+func (au *AttachmentUpdate) SetNillablePrimary(b *bool) *AttachmentUpdate {
+	if b != nil {
+		au.SetPrimary(*b)
+	}
+	return au
+}
+
 // SetItemID sets the "item" edge to the Item entity by ID.
 func (au *AttachmentUpdate) SetItemID(id uuid.UUID) *AttachmentUpdate {
 	au.mutation.SetItemID(id)

@@ -160,6 +174,9 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
 	if value, ok := au.mutation.GetType(); ok {
 		_spec.SetField(attachment.FieldType, field.TypeEnum, value)
 	}
+	if value, ok := au.mutation.Primary(); ok {
+		_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
+	}
 	if au.mutation.ItemCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel: sqlgraph.M2O,

@@ -258,6 +275,20 @@ func (auo *AttachmentUpdateOne) SetNillableType(a *attachment.Type) *AttachmentU
 	return auo
 }

+// SetPrimary sets the "primary" field.
+func (auo *AttachmentUpdateOne) SetPrimary(b bool) *AttachmentUpdateOne {
+	auo.mutation.SetPrimary(b)
+	return auo
+}
+
+// SetNillablePrimary sets the "primary" field if the given value is not nil.
+func (auo *AttachmentUpdateOne) SetNillablePrimary(b *bool) *AttachmentUpdateOne {
+	if b != nil {
+		auo.SetPrimary(*b)
+	}
+	return auo
+}
+
 // SetItemID sets the "item" edge to the Item entity by ID.
 func (auo *AttachmentUpdateOne) SetItemID(id uuid.UUID) *AttachmentUpdateOne {
 	auo.mutation.SetItemID(id)

@@ -397,6 +428,9 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
 	if value, ok := auo.mutation.GetType(); ok {
 		_spec.SetField(attachment.FieldType, field.TypeEnum, value)
 	}
+	if value, ok := auo.mutation.Primary(); ok {
+		_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
+	}
 	if auo.mutation.ItemCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel: sqlgraph.M2O,
@@ -98,32 +98,15 @@ func HasTokenWith(preds ...predicate.AuthTokens) predicate.AuthRoles {

 // And groups predicates with the AND operator between them.
 func And(predicates ...predicate.AuthRoles) predicate.AuthRoles {
-	return predicate.AuthRoles(func(s *sql.Selector) {
-		s1 := s.Clone().SetP(nil)
-		for _, p := range predicates {
-			p(s1)
-		}
-		s.Where(s1.P())
-	})
+	return predicate.AuthRoles(sql.AndPredicates(predicates...))
 }

 // Or groups predicates with the OR operator between them.
 func Or(predicates ...predicate.AuthRoles) predicate.AuthRoles {
-	return predicate.AuthRoles(func(s *sql.Selector) {
-		s1 := s.Clone().SetP(nil)
-		for i, p := range predicates {
-			if i > 0 {
-				s1.Or()
-			}
-			p(s1)
-		}
-		s.Where(s1.P())
-	})
+	return predicate.AuthRoles(sql.OrPredicates(predicates...))
 }

 // Not applies the not operator on the given predicate.
 func Not(p predicate.AuthRoles) predicate.AuthRoles {
-	return predicate.AuthRoles(func(s *sql.Selector) {
-		p(s.Not())
-	})
+	return predicate.AuthRoles(sql.NotPredicates(p))
 }
@@ -158,11 +158,15 @@ func (arc *AuthRolesCreate) createSpec() (*AuthRoles, *sqlgraph.CreateSpec) {
 // AuthRolesCreateBulk is the builder for creating many AuthRoles entities in bulk.
 type AuthRolesCreateBulk struct {
 	config
+	err      error
 	builders []*AuthRolesCreate
 }

 // Save creates the AuthRoles entities in the database.
 func (arcb *AuthRolesCreateBulk) Save(ctx context.Context) ([]*AuthRoles, error) {
+	if arcb.err != nil {
+		return nil, arcb.err
+	}
 	specs := make([]*sqlgraph.CreateSpec, len(arcb.builders))
 	nodes := make([]*AuthRoles, len(arcb.builders))
 	mutators := make([]Mutator, len(arcb.builders))
|
@ -284,32 +284,15 @@ func HasRolesWith(preds ...predicate.AuthRoles) predicate.AuthTokens {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.AuthTokens(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.AuthTokens(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
p(s.Not())
})
return predicate.AuthTokens(sql.NotPredicates(p))
}
@ -280,11 +280,15 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
// AuthTokensCreateBulk is the builder for creating many AuthTokens entities in bulk.
type AuthTokensCreateBulk struct {
config
err error
builders []*AuthTokensCreate
}

// Save creates the AuthTokens entities in the database.
func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, error) {
if atcb.err != nil {
return nil, atcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(atcb.builders))
nodes := make([]*AuthTokens, len(atcb.builders))
mutators := make([]Mutator, len(atcb.builders))
@ -7,6 +7,7 @@ import (
"errors"
"fmt"
"log"
"reflect"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/migrate"
@ -65,9 +66,7 @@ type Client struct {

// NewClient creates a new client configured with the given options.
func NewClient(opts ...Option) *Client {
cfg := config{log: log.Println, hooks: &hooks{}, inters: &inters{}}
cfg.options(opts...)
client := &Client{config: cfg}
client := &Client{config: newConfig(opts...)}
client.init()
return client
}
@ -107,6 +106,13 @@ type (
Option func(*config)
)

// newConfig creates a new config for the client.
func newConfig(opts ...Option) config {
cfg := config{log: log.Println, hooks: &hooks{}, inters: &inters{}}
cfg.options(opts...)
return cfg
}

// options applies the options on the config object.
func (c *config) options(opts ...Option) {
for _, opt := range opts {
@ -154,11 +160,14 @@ func Open(driverName, dataSourceName string, options ...Option) (*Client, error)
}
}

// ErrTxStarted is returned when trying to start a new transaction from a transactional client.
var ErrTxStarted = errors.New("ent: cannot start a transaction within a transaction")

// Tx returns a new transactional client. The provided context
// is used until the transaction is committed or rolled back.
func (c *Client) Tx(ctx context.Context) (*Tx, error) {
if _, ok := c.driver.(*txDriver); ok {
return nil, errors.New("ent: cannot start a transaction within a transaction")
return nil, ErrTxStarted
}
tx, err := newTx(ctx, c.driver)
if err != nil {
@ -330,6 +339,21 @@ func (c *AttachmentClient) CreateBulk(builders ...*AttachmentCreate) *Attachment
return &AttachmentCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *AttachmentClient) MapCreateBulk(slice any, setFunc func(*AttachmentCreate, int)) *AttachmentCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &AttachmentCreateBulk{err: fmt.Errorf("calling to AttachmentClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*AttachmentCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &AttachmentCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Attachment.
func (c *AttachmentClient) Update() *AttachmentUpdate {
mutation := newAttachmentMutation(c.config, OpUpdate)
@ -480,6 +504,21 @@ func (c *AuthRolesClient) CreateBulk(builders ...*AuthRolesCreate) *AuthRolesCre
return &AuthRolesCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *AuthRolesClient) MapCreateBulk(slice any, setFunc func(*AuthRolesCreate, int)) *AuthRolesCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &AuthRolesCreateBulk{err: fmt.Errorf("calling to AuthRolesClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*AuthRolesCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &AuthRolesCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for AuthRoles.
func (c *AuthRolesClient) Update() *AuthRolesUpdate {
mutation := newAuthRolesMutation(c.config, OpUpdate)
@ -614,6 +653,21 @@ func (c *AuthTokensClient) CreateBulk(builders ...*AuthTokensCreate) *AuthTokens
return &AuthTokensCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *AuthTokensClient) MapCreateBulk(slice any, setFunc func(*AuthTokensCreate, int)) *AuthTokensCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &AuthTokensCreateBulk{err: fmt.Errorf("calling to AuthTokensClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*AuthTokensCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &AuthTokensCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for AuthTokens.
func (c *AuthTokensClient) Update() *AuthTokensUpdate {
mutation := newAuthTokensMutation(c.config, OpUpdate)
@ -764,6 +818,21 @@ func (c *DocumentClient) CreateBulk(builders ...*DocumentCreate) *DocumentCreate
return &DocumentCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *DocumentClient) MapCreateBulk(slice any, setFunc func(*DocumentCreate, int)) *DocumentCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &DocumentCreateBulk{err: fmt.Errorf("calling to DocumentClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*DocumentCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &DocumentCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Document.
func (c *DocumentClient) Update() *DocumentUpdate {
mutation := newDocumentMutation(c.config, OpUpdate)
@ -914,6 +983,21 @@ func (c *GroupClient) CreateBulk(builders ...*GroupCreate) *GroupCreateBulk {
return &GroupCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *GroupClient) MapCreateBulk(slice any, setFunc func(*GroupCreate, int)) *GroupCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &GroupCreateBulk{err: fmt.Errorf("calling to GroupClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*GroupCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &GroupCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Group.
func (c *GroupClient) Update() *GroupUpdate {
mutation := newGroupMutation(c.config, OpUpdate)
@ -1144,6 +1228,21 @@ func (c *GroupInvitationTokenClient) CreateBulk(builders ...*GroupInvitationToke
return &GroupInvitationTokenCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *GroupInvitationTokenClient) MapCreateBulk(slice any, setFunc func(*GroupInvitationTokenCreate, int)) *GroupInvitationTokenCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &GroupInvitationTokenCreateBulk{err: fmt.Errorf("calling to GroupInvitationTokenClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*GroupInvitationTokenCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &GroupInvitationTokenCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for GroupInvitationToken.
func (c *GroupInvitationTokenClient) Update() *GroupInvitationTokenUpdate {
mutation := newGroupInvitationTokenMutation(c.config, OpUpdate)
@ -1278,6 +1377,21 @@ func (c *ItemClient) CreateBulk(builders ...*ItemCreate) *ItemCreateBulk {
return &ItemCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *ItemClient) MapCreateBulk(slice any, setFunc func(*ItemCreate, int)) *ItemCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &ItemCreateBulk{err: fmt.Errorf("calling to ItemClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*ItemCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &ItemCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Item.
func (c *ItemClient) Update() *ItemUpdate {
mutation := newItemMutation(c.config, OpUpdate)
@ -1524,6 +1638,21 @@ func (c *ItemFieldClient) CreateBulk(builders ...*ItemFieldCreate) *ItemFieldCre
return &ItemFieldCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *ItemFieldClient) MapCreateBulk(slice any, setFunc func(*ItemFieldCreate, int)) *ItemFieldCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &ItemFieldCreateBulk{err: fmt.Errorf("calling to ItemFieldClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*ItemFieldCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &ItemFieldCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for ItemField.
func (c *ItemFieldClient) Update() *ItemFieldUpdate {
mutation := newItemFieldMutation(c.config, OpUpdate)
@ -1658,6 +1787,21 @@ func (c *LabelClient) CreateBulk(builders ...*LabelCreate) *LabelCreateBulk {
return &LabelCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *LabelClient) MapCreateBulk(slice any, setFunc func(*LabelCreate, int)) *LabelCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &LabelCreateBulk{err: fmt.Errorf("calling to LabelClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*LabelCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &LabelCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Label.
func (c *LabelClient) Update() *LabelUpdate {
mutation := newLabelMutation(c.config, OpUpdate)
@ -1808,6 +1952,21 @@ func (c *LocationClient) CreateBulk(builders ...*LocationCreate) *LocationCreate
return &LocationCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *LocationClient) MapCreateBulk(slice any, setFunc func(*LocationCreate, int)) *LocationCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &LocationCreateBulk{err: fmt.Errorf("calling to LocationClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*LocationCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &LocationCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Location.
func (c *LocationClient) Update() *LocationUpdate {
mutation := newLocationMutation(c.config, OpUpdate)
@ -1990,6 +2149,21 @@ func (c *MaintenanceEntryClient) CreateBulk(builders ...*MaintenanceEntryCreate)
return &MaintenanceEntryCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *MaintenanceEntryClient) MapCreateBulk(slice any, setFunc func(*MaintenanceEntryCreate, int)) *MaintenanceEntryCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &MaintenanceEntryCreateBulk{err: fmt.Errorf("calling to MaintenanceEntryClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*MaintenanceEntryCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &MaintenanceEntryCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for MaintenanceEntry.
func (c *MaintenanceEntryClient) Update() *MaintenanceEntryUpdate {
mutation := newMaintenanceEntryMutation(c.config, OpUpdate)
@ -2124,6 +2298,21 @@ func (c *NotifierClient) CreateBulk(builders ...*NotifierCreate) *NotifierCreate
return &NotifierCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *NotifierClient) MapCreateBulk(slice any, setFunc func(*NotifierCreate, int)) *NotifierCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &NotifierCreateBulk{err: fmt.Errorf("calling to NotifierClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*NotifierCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &NotifierCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Notifier.
func (c *NotifierClient) Update() *NotifierUpdate {
mutation := newNotifierMutation(c.config, OpUpdate)
@ -2274,6 +2463,21 @@ func (c *UserClient) CreateBulk(builders ...*UserCreate) *UserCreateBulk {
return &UserCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *UserClient) MapCreateBulk(slice any, setFunc func(*UserCreate, int)) *UserCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &UserCreateBulk{err: fmt.Errorf("calling to UserClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*UserCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &UserCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for User.
func (c *UserClient) Update() *UserUpdate {
mutation := newUserMutation(c.config, OpUpdate)
@ -334,32 +334,15 @@ func HasAttachmentsWith(preds ...predicate.Attachment) predicate.Document {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Document) predicate.Document {
return predicate.Document(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Document(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Document) predicate.Document {
return predicate.Document(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Document(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Document) predicate.Document {
return predicate.Document(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Document(sql.NotPredicates(p))
}
@ -269,11 +269,15 @@ func (dc *DocumentCreate) createSpec() (*Document, *sqlgraph.CreateSpec) {
// DocumentCreateBulk is the builder for creating many Document entities in bulk.
type DocumentCreateBulk struct {
config
err error
builders []*DocumentCreate
}

// Save creates the Document entities in the database.
func (dcb *DocumentCreateBulk) Save(ctx context.Context) ([]*Document, error) {
if dcb.err != nil {
return nil, dcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(dcb.builders))
nodes := make([]*Document, len(dcb.builders))
mutators := make([]Mutator, len(dcb.builders))
@ -43,12 +43,28 @@ func (du *DocumentUpdate) SetTitle(s string) *DocumentUpdate {
return du
}

// SetNillableTitle sets the "title" field if the given value is not nil.
func (du *DocumentUpdate) SetNillableTitle(s *string) *DocumentUpdate {
if s != nil {
du.SetTitle(*s)
}
return du
}

// SetPath sets the "path" field.
func (du *DocumentUpdate) SetPath(s string) *DocumentUpdate {
du.mutation.SetPath(s)
return du
}

// SetNillablePath sets the "path" field if the given value is not nil.
func (du *DocumentUpdate) SetNillablePath(s *string) *DocumentUpdate {
if s != nil {
du.SetPath(*s)
}
return du
}

// SetGroupID sets the "group" edge to the Group entity by ID.
func (du *DocumentUpdate) SetGroupID(id uuid.UUID) *DocumentUpdate {
du.mutation.SetGroupID(id)
@ -288,12 +304,28 @@ func (duo *DocumentUpdateOne) SetTitle(s string) *DocumentUpdateOne {
return duo
}

// SetNillableTitle sets the "title" field if the given value is not nil.
func (duo *DocumentUpdateOne) SetNillableTitle(s *string) *DocumentUpdateOne {
if s != nil {
duo.SetTitle(*s)
}
return duo
}

// SetPath sets the "path" field.
func (duo *DocumentUpdateOne) SetPath(s string) *DocumentUpdateOne {
duo.mutation.SetPath(s)
return duo
}

// SetNillablePath sets the "path" field if the given value is not nil.
func (duo *DocumentUpdateOne) SetNillablePath(s *string) *DocumentUpdateOne {
if s != nil {
duo.SetPath(*s)
}
return duo
}

// SetGroupID sets the "group" edge to the Group entity by ID.
func (duo *DocumentUpdateOne) SetGroupID(id uuid.UUID) *DocumentUpdateOne {
duo.mutation.SetGroupID(id)
@ -137,7 +137,6 @@ const (
CurrencyBrl Currency = "brl"
CurrencyCad Currency = "cad"
CurrencyChf Currency = "chf"
CurrencyCny Currency = "cny"
CurrencyCzk Currency = "czk"
CurrencyDkk Currency = "dkk"
CurrencyEur Currency = "eur"
@ -160,6 +159,8 @@ const (
CurrencyThb Currency = "thb"
CurrencyTry Currency = "try"
CurrencyUsd Currency = "usd"
CurrencyXag Currency = "xag"
CurrencyXau Currency = "xau"
CurrencyZar Currency = "zar"
)

@ -170,7 +171,7 @@ func (c Currency) String() string {
// CurrencyValidator is a validator for the "currency" field enum values. It is called by the builders before save.
func CurrencyValidator(c Currency) error {
switch c {
case CurrencyAed, CurrencyAud, CurrencyBgn, CurrencyBrl, CurrencyCad, CurrencyChf, CurrencyCny, CurrencyCzk, CurrencyDkk, CurrencyEur, CurrencyGbp, CurrencyHkd, CurrencyIdr, CurrencyInr, CurrencyJpy, CurrencyKrw, CurrencyMxn, CurrencyNok, CurrencyNzd, CurrencyPln, CurrencyRmb, CurrencyRon, CurrencyRub, CurrencySar, CurrencySek, CurrencySgd, CurrencyThb, CurrencyTry, CurrencyUsd, CurrencyZar:
case CurrencyAed, CurrencyAud, CurrencyBgn, CurrencyBrl, CurrencyCad, CurrencyChf, CurrencyCzk, CurrencyDkk, CurrencyEur, CurrencyGbp, CurrencyHkd, CurrencyIdr, CurrencyInr, CurrencyJpy, CurrencyKrw, CurrencyMxn, CurrencyNok, CurrencyNzd, CurrencyPln, CurrencyRmb, CurrencyRon, CurrencyRub, CurrencySar, CurrencySek, CurrencySgd, CurrencyThb, CurrencyTry, CurrencyUsd, CurrencyXag, CurrencyXau, CurrencyZar:
return nil
default:
return fmt.Errorf("group: invalid enum value for currency field: %q", c)
@ -399,32 +399,15 @@ func HasNotifiersWith(preds ...predicate.Notifier) predicate.Group {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Group) predicate.Group {
return predicate.Group(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Group(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Group) predicate.Group {
return predicate.Group(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Group(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Group) predicate.Group {
return predicate.Group(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Group(sql.NotPredicates(p))
}
@ -441,11 +441,15 @@ func (gc *GroupCreate) createSpec() (*Group, *sqlgraph.CreateSpec) {
// GroupCreateBulk is the builder for creating many Group entities in bulk.
type GroupCreateBulk struct {
config
err error
builders []*GroupCreate
}

// Save creates the Group entities in the database.
func (gcb *GroupCreateBulk) Save(ctx context.Context) ([]*Group, error) {
if gcb.err != nil {
return nil, gcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(gcb.builders))
nodes := make([]*Group, len(gcb.builders))
mutators := make([]Mutator, len(gcb.builders))
@ -48,6 +48,14 @@ func (gu *GroupUpdate) SetName(s string) *GroupUpdate {
return gu
}

// SetNillableName sets the "name" field if the given value is not nil.
func (gu *GroupUpdate) SetNillableName(s *string) *GroupUpdate {
if s != nil {
gu.SetName(*s)
}
return gu
}

// SetCurrency sets the "currency" field.
func (gu *GroupUpdate) SetCurrency(gr group.Currency) *GroupUpdate {
gu.mutation.SetCurrency(gr)
@ -738,6 +746,14 @@ func (guo *GroupUpdateOne) SetName(s string) *GroupUpdateOne {
return guo
}

// SetNillableName sets the "name" field if the given value is not nil.
func (guo *GroupUpdateOne) SetNillableName(s *string) *GroupUpdateOne {
if s != nil {
guo.SetName(*s)
}
return guo
}

// SetCurrency sets the "currency" field.
func (guo *GroupUpdateOne) SetCurrency(gr group.Currency) *GroupUpdateOne {
guo.mutation.SetCurrency(gr)
@ -306,32 +306,15 @@ func HasGroupWith(preds ...predicate.Group) predicate.GroupInvitationToken {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.GroupInvitationToken) predicate.GroupInvitationToken {
return predicate.GroupInvitationToken(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.GroupInvitationToken(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.GroupInvitationToken) predicate.GroupInvitationToken {
return predicate.GroupInvitationToken(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.GroupInvitationToken(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.GroupInvitationToken) predicate.GroupInvitationToken {
return predicate.GroupInvitationToken(func(s *sql.Selector) {
p(s.Not())
})
return predicate.GroupInvitationToken(sql.NotPredicates(p))
}
@ -269,11 +269,15 @@ func (gitc *GroupInvitationTokenCreate) createSpec() (*GroupInvitationToken, *sq
// GroupInvitationTokenCreateBulk is the builder for creating many GroupInvitationToken entities in bulk.
type GroupInvitationTokenCreateBulk struct {
config
err error
builders []*GroupInvitationTokenCreate
}

// Save creates the GroupInvitationToken entities in the database.
func (gitcb *GroupInvitationTokenCreateBulk) Save(ctx context.Context) ([]*GroupInvitationToken, error) {
if gitcb.err != nil {
return nil, gitcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(gitcb.builders))
nodes := make([]*GroupInvitationToken, len(gitcb.builders))
mutators := make([]Mutator, len(gitcb.builders))
@ -1592,32 +1592,15 @@ func HasAttachmentsWith(preds ...predicate.Attachment) predicate.Item {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Item) predicate.Item {
return predicate.Item(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Item(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Item) predicate.Item {
return predicate.Item(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Item(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Item) predicate.Item {
return predicate.Item(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Item(sql.NotPredicates(p))
}
@ -900,11 +900,15 @@ func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
// ItemCreateBulk is the builder for creating many Item entities in bulk.
type ItemCreateBulk struct {
config
err error
builders []*ItemCreate
}

// Save creates the Item entities in the database.
func (icb *ItemCreateBulk) Save(ctx context.Context) ([]*Item, error) {
if icb.err != nil {
return nil, icb.err
}
specs := make([]*sqlgraph.CreateSpec, len(icb.builders))
nodes := make([]*Item, len(icb.builders))
mutators := make([]Mutator, len(icb.builders))
@ -47,6 +47,14 @@ func (iu *ItemUpdate) SetName(s string) *ItemUpdate {
return iu
}

// SetNillableName sets the "name" field if the given value is not nil.
func (iu *ItemUpdate) SetNillableName(s *string) *ItemUpdate {
if s != nil {
iu.SetName(*s)
}
return iu
}

// SetDescription sets the "description" field.
func (iu *ItemUpdate) SetDescription(s string) *ItemUpdate {
iu.mutation.SetDescription(s)
@ -1247,6 +1255,14 @@ func (iuo *ItemUpdateOne) SetName(s string) *ItemUpdateOne {
return iuo
}

// SetNillableName sets the "name" field if the given value is not nil.
func (iuo *ItemUpdateOne) SetNillableName(s *string) *ItemUpdateOne {
if s != nil {
iuo.SetName(*s)
}
return iuo
}

// SetDescription sets the "description" field.
func (iuo *ItemUpdateOne) SetDescription(s string) *ItemUpdateOne {
iuo.mutation.SetDescription(s)
@ -536,32 +536,15 @@ func HasItemWith(preds ...predicate.Item) predicate.ItemField {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.ItemField) predicate.ItemField {
return predicate.ItemField(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.ItemField(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.ItemField) predicate.ItemField {
return predicate.ItemField(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.ItemField(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.ItemField) predicate.ItemField {
return predicate.ItemField(func(s *sql.Selector) {
p(s.Not())
})
return predicate.ItemField(sql.NotPredicates(p))
}
@ -356,11 +356,15 @@ func (ifc *ItemFieldCreate) createSpec() (*ItemField, *sqlgraph.CreateSpec) {
// ItemFieldCreateBulk is the builder for creating many ItemField entities in bulk.
type ItemFieldCreateBulk struct {
config
err error
builders []*ItemFieldCreate
}

// Save creates the ItemField entities in the database.
func (ifcb *ItemFieldCreateBulk) Save(ctx context.Context) ([]*ItemField, error) {
if ifcb.err != nil {
return nil, ifcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(ifcb.builders))
nodes := make([]*ItemField, len(ifcb.builders))
mutators := make([]Mutator, len(ifcb.builders))
@ -42,6 +42,14 @@ func (ifu *ItemFieldUpdate) SetName(s string) *ItemFieldUpdate {
return ifu
}

// SetNillableName sets the "name" field if the given value is not nil.
func (ifu *ItemFieldUpdate) SetNillableName(s *string) *ItemFieldUpdate {
if s != nil {
ifu.SetName(*s)
}
return ifu
}

// SetDescription sets the "description" field.
func (ifu *ItemFieldUpdate) SetDescription(s string) *ItemFieldUpdate {
ifu.mutation.SetDescription(s)
@ -68,6 +76,14 @@ func (ifu *ItemFieldUpdate) SetType(i itemfield.Type) *ItemFieldUpdate {
return ifu
}

// SetNillableType sets the "type" field if the given value is not nil.
func (ifu *ItemFieldUpdate) SetNillableType(i *itemfield.Type) *ItemFieldUpdate {
if i != nil {
ifu.SetType(*i)
}
return ifu
}

// SetTextValue sets the "text_value" field.
func (ifu *ItemFieldUpdate) SetTextValue(s string) *ItemFieldUpdate {
ifu.mutation.SetTextValue(s)
@ -343,6 +359,14 @@ func (ifuo *ItemFieldUpdateOne) SetName(s string) *ItemFieldUpdateOne {
return ifuo
}

// SetNillableName sets the "name" field if the given value is not nil.
func (ifuo *ItemFieldUpdateOne) SetNillableName(s *string) *ItemFieldUpdateOne {
if s != nil {
ifuo.SetName(*s)
}
return ifuo
}

// SetDescription sets the "description" field.
func (ifuo *ItemFieldUpdateOne) SetDescription(s string) *ItemFieldUpdateOne {
ifuo.mutation.SetDescription(s)
@ -369,6 +393,14 @@ func (ifuo *ItemFieldUpdateOne) SetType(i itemfield.Type) *ItemFieldUpdateOne {
return ifuo
}

// SetNillableType sets the "type" field if the given value is not nil.
func (ifuo *ItemFieldUpdateOne) SetNillableType(i *itemfield.Type) *ItemFieldUpdateOne {
if i != nil {
ifuo.SetType(*i)
}
return ifuo
}

// SetTextValue sets the "text_value" field.
func (ifuo *ItemFieldUpdateOne) SetTextValue(s string) *ItemFieldUpdateOne {
ifuo.mutation.SetTextValue(s)
@ -424,32 +424,15 @@ func HasItemsWith(preds ...predicate.Item) predicate.Label {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Label) predicate.Label {
return predicate.Label(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Label(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Label) predicate.Label {
return predicate.Label(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Label(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Label) predicate.Label {
return predicate.Label(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Label(sql.NotPredicates(p))
}
@ -297,11 +297,15 @@ func (lc *LabelCreate) createSpec() (*Label, *sqlgraph.CreateSpec) {
// LabelCreateBulk is the builder for creating many Label entities in bulk.
type LabelCreateBulk struct {
config
err error
builders []*LabelCreate
}

// Save creates the Label entities in the database.
func (lcb *LabelCreateBulk) Save(ctx context.Context) ([]*Label, error) {
if lcb.err != nil {
return nil, lcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(lcb.builders))
nodes := make([]*Label, len(lcb.builders))
mutators := make([]Mutator, len(lcb.builders))
@ -43,6 +43,14 @@ func (lu *LabelUpdate) SetName(s string) *LabelUpdate {
return lu
}

// SetNillableName sets the "name" field if the given value is not nil.
func (lu *LabelUpdate) SetNillableName(s *string) *LabelUpdate {
if s != nil {
lu.SetName(*s)
}
return lu
}

// SetDescription sets the "description" field.
func (lu *LabelUpdate) SetDescription(s string) *LabelUpdate {
lu.mutation.SetDescription(s)
@ -336,6 +344,14 @@ func (luo *LabelUpdateOne) SetName(s string) *LabelUpdateOne {
return luo
}

// SetNillableName sets the "name" field if the given value is not nil.
func (luo *LabelUpdateOne) SetNillableName(s *string) *LabelUpdateOne {
if s != nil {
luo.SetName(*s)
}
return luo
}

// SetDescription sets the "description" field.
func (luo *LabelUpdateOne) SetDescription(s string) *LabelUpdateOne {
luo.mutation.SetDescription(s)
@ -390,32 +390,15 @@ func HasItemsWith(preds ...predicate.Item) predicate.Location {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Location) predicate.Location {
return predicate.Location(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Location(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Location) predicate.Location {
return predicate.Location(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Location(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Location) predicate.Location {
return predicate.Location(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Location(sql.NotPredicates(p))
}
@ -341,11 +341,15 @@ func (lc *LocationCreate) createSpec() (*Location, *sqlgraph.CreateSpec) {
// LocationCreateBulk is the builder for creating many Location entities in bulk.
type LocationCreateBulk struct {
config
err error
builders []*LocationCreate
}

// Save creates the Location entities in the database.
func (lcb *LocationCreateBulk) Save(ctx context.Context) ([]*Location, error) {
if lcb.err != nil {
return nil, lcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(lcb.builders))
nodes := make([]*Location, len(lcb.builders))
mutators := make([]Mutator, len(lcb.builders))
@ -43,6 +43,14 @@ func (lu *LocationUpdate) SetName(s string) *LocationUpdate {
return lu
}

// SetNillableName sets the "name" field if the given value is not nil.
func (lu *LocationUpdate) SetNillableName(s *string) *LocationUpdate {
if s != nil {
lu.SetName(*s)
}
return lu
}

// SetDescription sets the "description" field.
func (lu *LocationUpdate) SetDescription(s string) *LocationUpdate {
lu.mutation.SetDescription(s)
@ -440,6 +448,14 @@ func (luo *LocationUpdateOne) SetName(s string) *LocationUpdateOne {
return luo
}

// SetNillableName sets the "name" field if the given value is not nil.
func (luo *LocationUpdateOne) SetNillableName(s *string) *LocationUpdateOne {
if s != nil {
luo.SetName(*s)
}
return luo
}

// SetDescription sets the "description" field.
func (luo *LocationUpdateOne) SetDescription(s string) *LocationUpdateOne {
luo.mutation.SetDescription(s)
@ -501,32 +501,15 @@ func HasItemWith(preds ...predicate.Item) predicate.MaintenanceEntry {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.MaintenanceEntry) predicate.MaintenanceEntry {
return predicate.MaintenanceEntry(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.MaintenanceEntry(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.MaintenanceEntry) predicate.MaintenanceEntry {
return predicate.MaintenanceEntry(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.MaintenanceEntry(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.MaintenanceEntry) predicate.MaintenanceEntry {
return predicate.MaintenanceEntry(func(s *sql.Selector) {
p(s.Not())
})
return predicate.MaintenanceEntry(sql.NotPredicates(p))
}
@ -306,11 +306,15 @@ func (mec *MaintenanceEntryCreate) createSpec() (*MaintenanceEntry, *sqlgraph.Cr
// MaintenanceEntryCreateBulk is the builder for creating many MaintenanceEntry entities in bulk.
type MaintenanceEntryCreateBulk struct {
config
err error
builders []*MaintenanceEntryCreate
}

// Save creates the MaintenanceEntry entities in the database.
func (mecb *MaintenanceEntryCreateBulk) Save(ctx context.Context) ([]*MaintenanceEntry, error) {
if mecb.err != nil {
return nil, mecb.err
}
specs := make([]*sqlgraph.CreateSpec, len(mecb.builders))
nodes := make([]*MaintenanceEntry, len(mecb.builders))
mutators := make([]Mutator, len(mecb.builders))
@ -42,6 +42,14 @@ func (meu *MaintenanceEntryUpdate) SetItemID(u uuid.UUID) *MaintenanceEntryUpdat
return meu
}

// SetNillableItemID sets the "item_id" field if the given value is not nil.
func (meu *MaintenanceEntryUpdate) SetNillableItemID(u *uuid.UUID) *MaintenanceEntryUpdate {
if u != nil {
meu.SetItemID(*u)
}
return meu
}

// SetDate sets the "date" field.
func (meu *MaintenanceEntryUpdate) SetDate(t time.Time) *MaintenanceEntryUpdate {
meu.mutation.SetDate(t)
@ -88,6 +96,14 @@ func (meu *MaintenanceEntryUpdate) SetName(s string) *MaintenanceEntryUpdate {
return meu
}

// SetNillableName sets the "name" field if the given value is not nil.
func (meu *MaintenanceEntryUpdate) SetNillableName(s *string) *MaintenanceEntryUpdate {
if s != nil {
meu.SetName(*s)
}
return meu
}

// SetDescription sets the "description" field.
func (meu *MaintenanceEntryUpdate) SetDescription(s string) *MaintenanceEntryUpdate {
meu.mutation.SetDescription(s)
@ -302,6 +318,14 @@ func (meuo *MaintenanceEntryUpdateOne) SetItemID(u uuid.UUID) *MaintenanceEntryU
return meuo
}

// SetNillableItemID sets the "item_id" field if the given value is not nil.
func (meuo *MaintenanceEntryUpdateOne) SetNillableItemID(u *uuid.UUID) *MaintenanceEntryUpdateOne {
if u != nil {
meuo.SetItemID(*u)
}
return meuo
}

// SetDate sets the "date" field.
func (meuo *MaintenanceEntryUpdateOne) SetDate(t time.Time) *MaintenanceEntryUpdateOne {
meuo.mutation.SetDate(t)
@ -348,6 +372,14 @@ func (meuo *MaintenanceEntryUpdateOne) SetName(s string) *MaintenanceEntryUpdate
return meuo
}

// SetNillableName sets the "name" field if the given value is not nil.
func (meuo *MaintenanceEntryUpdateOne) SetNillableName(s *string) *MaintenanceEntryUpdateOne {
if s != nil {
meuo.SetName(*s)
}
return meuo
}

// SetDescription sets the "description" field.
func (meuo *MaintenanceEntryUpdateOne) SetDescription(s string) *MaintenanceEntryUpdateOne {
meuo.mutation.SetDescription(s)
@ -14,6 +14,7 @@ var (
{Name: "created_at", Type: field.TypeTime},
{Name: "updated_at", Type: field.TypeTime},
{Name: "type", Type: field.TypeEnum, Enums: []string{"photo", "manual", "warranty", "attachment", "receipt"}, Default: "attachment"},
{Name: "primary", Type: field.TypeBool, Default: false},
{Name: "document_attachments", Type: field.TypeUUID},
{Name: "item_attachments", Type: field.TypeUUID},
}
@ -25,13 +26,13 @@ var (
ForeignKeys: []*schema.ForeignKey{
{
Symbol: "attachments_documents_attachments",
Columns: []*schema.Column{AttachmentsColumns[4]},
Columns: []*schema.Column{AttachmentsColumns[5]},
RefColumns: []*schema.Column{DocumentsColumns[0]},
OnDelete: schema.Cascade,
},
{
Symbol: "attachments_items_attachments",
Columns: []*schema.Column{AttachmentsColumns[5]},
Columns: []*schema.Column{AttachmentsColumns[6]},
RefColumns: []*schema.Column{ItemsColumns[0]},
OnDelete: schema.Cascade,
},
@ -116,7 +117,7 @@ var (
{Name: "created_at", Type: field.TypeTime},
{Name: "updated_at", Type: field.TypeTime},
{Name: "name", Type: field.TypeString, Size: 255},
{Name: "currency", Type: field.TypeEnum, Enums: []string{"aed", "aud", "bgn", "brl", "cad", "chf", "cny", "czk", "dkk", "eur", "gbp", "hkd", "idr", "inr", "jpy", "krw", "mxn", "nok", "nzd", "pln", "rmb", "ron", "rub", "sar", "sek", "sgd", "thb", "try", "usd", "zar"}, Default: "usd"},
{Name: "currency", Type: field.TypeEnum, Enums: []string{"aed", "aud", "bgn", "brl", "cad", "chf", "czk", "dkk", "eur", "gbp", "hkd", "idr", "inr", "jpy", "krw", "mxn", "nok", "nzd", "pln", "rmb", "ron", "rub", "sar", "sek", "sgd", "thb", "try", "usd", "xag", "xau", "zar"}, Default: "usd"},
}
// GroupsTable holds the schema information for the "groups" table.
GroupsTable = &schema.Table{
@ -61,6 +61,7 @@ type AttachmentMutation struct {
created_at *time.Time
updated_at *time.Time
_type *attachment.Type
primary *bool
clearedFields map[string]struct{}
item *uuid.UUID
cleareditem bool
@ -283,6 +284,42 @@ func (m *AttachmentMutation) ResetType() {
m._type = nil
}

// SetPrimary sets the "primary" field.
func (m *AttachmentMutation) SetPrimary(b bool) {
m.primary = &b
}

// Primary returns the value of the "primary" field in the mutation.
func (m *AttachmentMutation) Primary() (r bool, exists bool) {
v := m.primary
if v == nil {
return
}
return *v, true
}

// OldPrimary returns the old "primary" field's value of the Attachment entity.
// If the Attachment object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *AttachmentMutation) OldPrimary(ctx context.Context) (v bool, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldPrimary is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldPrimary requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldPrimary: %w", err)
}
return oldValue.Primary, nil
}

// ResetPrimary resets all changes to the "primary" field.
func (m *AttachmentMutation) ResetPrimary() {
m.primary = nil
}

// SetItemID sets the "item" edge to the Item entity by id.
func (m *AttachmentMutation) SetItemID(id uuid.UUID) {
m.item = &id
@ -395,7 +432,7 @@ func (m *AttachmentMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *AttachmentMutation) Fields() []string {
fields := make([]string, 0, 3)
fields := make([]string, 0, 4)
if m.created_at != nil {
fields = append(fields, attachment.FieldCreatedAt)
}
@ -405,6 +442,9 @@ func (m *AttachmentMutation) Fields() []string {
if m._type != nil {
fields = append(fields, attachment.FieldType)
}
if m.primary != nil {
fields = append(fields, attachment.FieldPrimary)
}
return fields
}

@ -419,6 +459,8 @@ func (m *AttachmentMutation) Field(name string) (ent.Value, bool) {
return m.UpdatedAt()
case attachment.FieldType:
return m.GetType()
case attachment.FieldPrimary:
return m.Primary()
}
return nil, false
}
@ -434,6 +476,8 @@ func (m *AttachmentMutation) OldField(ctx context.Context, name string) (ent.Val
return m.OldUpdatedAt(ctx)
case attachment.FieldType:
return m.OldType(ctx)
case attachment.FieldPrimary:
return m.OldPrimary(ctx)
}
return nil, fmt.Errorf("unknown Attachment field %s", name)
}
@ -464,6 +508,13 @@ func (m *AttachmentMutation) SetField(name string, value ent.Value) error {
}
m.SetType(v)
return nil
case attachment.FieldPrimary:
v, ok := value.(bool)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetPrimary(v)
return nil
}
return fmt.Errorf("unknown Attachment field %s", name)
}
@ -522,6 +573,9 @@ func (m *AttachmentMutation) ResetField(name string) error {
case attachment.FieldType:
m.ResetType()
return nil
case attachment.FieldPrimary:
m.ResetPrimary()
return nil
}
return fmt.Errorf("unknown Attachment field %s", name)
}
@ -9475,6 +9529,7 @@ func (m *MaintenanceEntryMutation) ResetCost() {
// ClearItem clears the "item" edge to the Item entity.
func (m *MaintenanceEntryMutation) ClearItem() {
m.cleareditem = true
m.clearedFields[maintenanceentry.FieldItemID] = struct{}{}
}

// ItemCleared reports if the "item" edge to the Item entity was cleared.
@ -10238,6 +10293,7 @@ func (m *NotifierMutation) ResetIsActive() {
// ClearGroup clears the "group" edge to the Group entity.
func (m *NotifierMutation) ClearGroup() {
m.clearedgroup = true
m.clearedFields[notifier.FieldGroupID] = struct{}{}
}

// GroupCleared reports if the "group" edge to the Group entity was cleared.
@ -10264,6 +10320,7 @@ func (m *NotifierMutation) ResetGroup() {
// ClearUser clears the "user" edge to the User entity.
func (m *NotifierMutation) ClearUser() {
m.cleareduser = true
m.clearedFields[notifier.FieldUserID] = struct{}{}
}

// UserCleared reports if the "user" edge to the User entity was cleared.
@ -399,32 +399,15 @@ func HasUserWith(preds ...predicate.User) predicate.Notifier {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Notifier) predicate.Notifier {
return predicate.Notifier(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Notifier(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Notifier) predicate.Notifier {
return predicate.Notifier(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Notifier(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Notifier) predicate.Notifier {
return predicate.Notifier(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Notifier(sql.NotPredicates(p))
}
@ -300,11 +300,15 @@ func (nc *NotifierCreate) createSpec() (*Notifier, *sqlgraph.CreateSpec) {
// NotifierCreateBulk is the builder for creating many Notifier entities in bulk.
type NotifierCreateBulk struct {
config
err error
builders []*NotifierCreate
}

// Save creates the Notifier entities in the database.
func (ncb *NotifierCreateBulk) Save(ctx context.Context) ([]*Notifier, error) {
if ncb.err != nil {
return nil, ncb.err
}
specs := make([]*sqlgraph.CreateSpec, len(ncb.builders))
nodes := make([]*Notifier, len(ncb.builders))
mutators := make([]Mutator, len(ncb.builders))
@ -43,24 +43,56 @@ func (nu *NotifierUpdate) SetGroupID(u uuid.UUID) *NotifierUpdate {
|
|||
return nu
|
||||
}
|
||||
|
||||
// SetNillableGroupID sets the "group_id" field if the given value is not nil.
|
||||
func (nu *NotifierUpdate) SetNillableGroupID(u *uuid.UUID) *NotifierUpdate {
|
||||
if u != nil {
|
||||
nu.SetGroupID(*u)
|
||||
}
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetUserID sets the "user_id" field.
|
||||
func (nu *NotifierUpdate) SetUserID(u uuid.UUID) *NotifierUpdate {
|
||||
nu.mutation.SetUserID(u)
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetNillableUserID sets the "user_id" field if the given value is not nil.
|
||||
func (nu *NotifierUpdate) SetNillableUserID(u *uuid.UUID) *NotifierUpdate {
|
||||
if u != nil {
|
||||
nu.SetUserID(*u)
|
||||
}
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (nu *NotifierUpdate) SetName(s string) *NotifierUpdate {
|
||||
nu.mutation.SetName(s)
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (nu *NotifierUpdate) SetNillableName(s *string) *NotifierUpdate {
|
||||
if s != nil {
|
||||
nu.SetName(*s)
|
||||
}
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetURL sets the "url" field.
|
||||
func (nu *NotifierUpdate) SetURL(s string) *NotifierUpdate {
|
||||
nu.mutation.SetURL(s)
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetNillableURL sets the "url" field if the given value is not nil.
|
||||
func (nu *NotifierUpdate) SetNillableURL(s *string) *NotifierUpdate {
|
||||
if s != nil {
|
||||
nu.SetURL(*s)
|
||||
}
|
||||
return nu
|
||||
}
|
||||
|
||||
// SetIsActive sets the "is_active" field.
|
||||
func (nu *NotifierUpdate) SetIsActive(b bool) *NotifierUpdate {
|
||||
nu.mutation.SetIsActive(b)
|
||||
|
@ -273,24 +305,56 @@ func (nuo *NotifierUpdateOne) SetGroupID(u uuid.UUID) *NotifierUpdateOne {
|
|||
return nuo
|
||||
}
|
||||
|
||||
// SetNillableGroupID sets the "group_id" field if the given value is not nil.
|
||||
func (nuo *NotifierUpdateOne) SetNillableGroupID(u *uuid.UUID) *NotifierUpdateOne {
|
||||
if u != nil {
|
||||
nuo.SetGroupID(*u)
|
||||
}
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetUserID sets the "user_id" field.
|
||||
func (nuo *NotifierUpdateOne) SetUserID(u uuid.UUID) *NotifierUpdateOne {
|
||||
nuo.mutation.SetUserID(u)
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetNillableUserID sets the "user_id" field if the given value is not nil.
|
||||
func (nuo *NotifierUpdateOne) SetNillableUserID(u *uuid.UUID) *NotifierUpdateOne {
|
||||
if u != nil {
|
||||
nuo.SetUserID(*u)
|
||||
}
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (nuo *NotifierUpdateOne) SetName(s string) *NotifierUpdateOne {
|
||||
nuo.mutation.SetName(s)
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (nuo *NotifierUpdateOne) SetNillableName(s *string) *NotifierUpdateOne {
|
||||
if s != nil {
|
||||
nuo.SetName(*s)
|
||||
}
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetURL sets the "url" field.
|
||||
func (nuo *NotifierUpdateOne) SetURL(s string) *NotifierUpdateOne {
|
||||
nuo.mutation.SetURL(s)
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetNillableURL sets the "url" field if the given value is not nil.
|
||||
func (nuo *NotifierUpdateOne) SetNillableURL(s *string) *NotifierUpdateOne {
|
||||
if s != nil {
|
||||
nuo.SetURL(*s)
|
||||
}
|
||||
return nuo
|
||||
}
|
||||
|
||||
// SetIsActive sets the "is_active" field.
|
||||
func (nuo *NotifierUpdateOne) SetIsActive(b bool) *NotifierUpdateOne {
|
||||
nuo.mutation.SetIsActive(b)
|
||||
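The SetNillable* setters added above (for both NotifierUpdate and NotifierUpdateOne) let a PATCH-style handler forward optional fields without nil-checking each one. A short sketch under that assumption; the NotifierPatch struct is illustrative and not part of the repository:

```go
package example

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/ent"
)

// NotifierPatch carries optional fields; a nil pointer means "leave unchanged".
type NotifierPatch struct {
	Name *string
	URL  *string
}

// patchNotifier applies only the fields the caller actually provided.
// SetNillableName/SetNillableURL are no-ops when the pointer is nil.
func patchNotifier(ctx context.Context, db *ent.Client, id uuid.UUID, p NotifierPatch) (*ent.Notifier, error) {
	return db.Notifier.UpdateOneID(id).
		SetNillableName(p.Name).
		SetNillableURL(p.URL).
		Save(ctx)
}
```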
@ -40,6 +40,10 @@ func init() {
|
|||
attachment.DefaultUpdatedAt = attachmentDescUpdatedAt.Default.(func() time.Time)
|
||||
// attachment.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
|
||||
attachment.UpdateDefaultUpdatedAt = attachmentDescUpdatedAt.UpdateDefault.(func() time.Time)
|
||||
// attachmentDescPrimary is the schema descriptor for primary field.
|
||||
attachmentDescPrimary := attachmentFields[1].Descriptor()
|
||||
// attachment.DefaultPrimary holds the default value on creation for the primary field.
|
||||
attachment.DefaultPrimary = attachmentDescPrimary.Default.(bool)
|
||||
// attachmentDescID is the schema descriptor for id field.
|
||||
attachmentDescID := attachmentMixinFields0[0].Descriptor()
|
||||
// attachment.DefaultID holds the default value on creation for the id field.
|
||||
@ -5,6 +5,6 @@ package runtime
|
|||
// The schema-stitching logic is generated in github.com/hay-kot/homebox/backend/internal/data/ent/runtime.go
|
||||
|
||||
const (
|
||||
Version = "v0.12.3" // Version of ent codegen.
|
||||
Sum = "h1:N5lO2EOrHpCH5HYfiMOCHYbo+oh5M8GjT0/cx5x6xkk=" // Sum of ent codegen.
|
||||
Version = "v0.12.5" // Version of ent codegen.
|
||||
Sum = "h1:KREM5E4CSoej4zeGa88Ou/gfturAnpUv0mzAjch1sj4=" // Sum of ent codegen.
|
||||
)
|
||||
@ -24,6 +24,8 @@ func (Attachment) Fields() []ent.Field {
|
|||
field.Enum("type").
|
||||
Values("photo", "manual", "warranty", "attachment", "receipt").
|
||||
Default("attachment"),
|
||||
field.Bool("primary").
|
||||
Default(false),
|
||||
}
|
||||
}
|
||||
|
||||
@ -58,6 +58,8 @@ func (Group) Fields() []ent.Field {
|
|||
"thb",
|
||||
"try",
|
||||
"usd",
|
||||
"xag",
|
||||
"xau",
|
||||
"zar",
|
||||
),
|
||||
}
|
||||
@ -532,32 +532,15 @@ func HasNotifiersWith(preds ...predicate.Notifier) predicate.User {
|
|||
|
||||
// And groups predicates with the AND operator between them.
|
||||
func And(predicates ...predicate.User) predicate.User {
|
||||
return predicate.User(func(s *sql.Selector) {
|
||||
s1 := s.Clone().SetP(nil)
|
||||
for _, p := range predicates {
|
||||
p(s1)
|
||||
}
|
||||
s.Where(s1.P())
|
||||
})
|
||||
return predicate.User(sql.AndPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Or groups predicates with the OR operator between them.
|
||||
func Or(predicates ...predicate.User) predicate.User {
|
||||
return predicate.User(func(s *sql.Selector) {
|
||||
s1 := s.Clone().SetP(nil)
|
||||
for i, p := range predicates {
|
||||
if i > 0 {
|
||||
s1.Or()
|
||||
}
|
||||
p(s1)
|
||||
}
|
||||
s.Where(s1.P())
|
||||
})
|
||||
return predicate.User(sql.OrPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Not applies the not operator on the given predicate.
|
||||
func Not(p predicate.User) predicate.User {
|
||||
return predicate.User(func(s *sql.Selector) {
|
||||
p(s.Not())
|
||||
})
|
||||
return predicate.User(sql.NotPredicates(p))
|
||||
}
|
||||
@ -417,11 +417,15 @@ func (uc *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) {
|
|||
// UserCreateBulk is the builder for creating many User entities in bulk.
|
||||
type UserCreateBulk struct {
|
||||
config
|
||||
err error
|
||||
builders []*UserCreate
|
||||
}
|
||||
|
||||
// Save creates the User entities in the database.
|
||||
func (ucb *UserCreateBulk) Save(ctx context.Context) ([]*User, error) {
|
||||
if ucb.err != nil {
|
||||
return nil, ucb.err
|
||||
}
|
||||
specs := make([]*sqlgraph.CreateSpec, len(ucb.builders))
|
||||
nodes := make([]*User, len(ucb.builders))
|
||||
mutators := make([]Mutator, len(ucb.builders))
|
||||
@ -44,18 +44,42 @@ func (uu *UserUpdate) SetName(s string) *UserUpdate {
|
|||
return uu
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (uu *UserUpdate) SetNillableName(s *string) *UserUpdate {
|
||||
if s != nil {
|
||||
uu.SetName(*s)
|
||||
}
|
||||
return uu
|
||||
}
|
||||
|
||||
// SetEmail sets the "email" field.
|
||||
func (uu *UserUpdate) SetEmail(s string) *UserUpdate {
|
||||
uu.mutation.SetEmail(s)
|
||||
return uu
|
||||
}
|
||||
|
||||
// SetNillableEmail sets the "email" field if the given value is not nil.
|
||||
func (uu *UserUpdate) SetNillableEmail(s *string) *UserUpdate {
|
||||
if s != nil {
|
||||
uu.SetEmail(*s)
|
||||
}
|
||||
return uu
|
||||
}
|
||||
|
||||
// SetPassword sets the "password" field.
|
||||
func (uu *UserUpdate) SetPassword(s string) *UserUpdate {
|
||||
uu.mutation.SetPassword(s)
|
||||
return uu
|
||||
}
|
||||
|
||||
// SetNillablePassword sets the "password" field if the given value is not nil.
|
||||
func (uu *UserUpdate) SetNillablePassword(s *string) *UserUpdate {
|
||||
if s != nil {
|
||||
uu.SetPassword(*s)
|
||||
}
|
||||
return uu
|
||||
}
|
||||
|
||||
// SetIsSuperuser sets the "is_superuser" field.
|
||||
func (uu *UserUpdate) SetIsSuperuser(b bool) *UserUpdate {
|
||||
uu.mutation.SetIsSuperuser(b)
|
||||
|
@ -466,18 +490,42 @@ func (uuo *UserUpdateOne) SetName(s string) *UserUpdateOne {
|
|||
return uuo
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (uuo *UserUpdateOne) SetNillableName(s *string) *UserUpdateOne {
|
||||
if s != nil {
|
||||
uuo.SetName(*s)
|
||||
}
|
||||
return uuo
|
||||
}
|
||||
|
||||
// SetEmail sets the "email" field.
|
||||
func (uuo *UserUpdateOne) SetEmail(s string) *UserUpdateOne {
|
||||
uuo.mutation.SetEmail(s)
|
||||
return uuo
|
||||
}
|
||||
|
||||
// SetNillableEmail sets the "email" field if the given value is not nil.
|
||||
func (uuo *UserUpdateOne) SetNillableEmail(s *string) *UserUpdateOne {
|
||||
if s != nil {
|
||||
uuo.SetEmail(*s)
|
||||
}
|
||||
return uuo
|
||||
}
|
||||
|
||||
// SetPassword sets the "password" field.
|
||||
func (uuo *UserUpdateOne) SetPassword(s string) *UserUpdateOne {
|
||||
uuo.mutation.SetPassword(s)
|
||||
return uuo
|
||||
}
|
||||
|
||||
// SetNillablePassword sets the "password" field if the given value is not nil.
|
||||
func (uuo *UserUpdateOne) SetNillablePassword(s *string) *UserUpdateOne {
|
||||
if s != nil {
|
||||
uuo.SetPassword(*s)
|
||||
}
|
||||
return uuo
|
||||
}
|
||||
|
||||
// SetIsSuperuser sets the "is_superuser" field.
|
||||
func (uuo *UserUpdateOne) SetIsSuperuser(b bool) *UserUpdateOne {
|
||||
uuo.mutation.SetIsSuperuser(b)
|
||||
@ -3,7 +3,7 @@ package migrations
|
|||
import (
|
||||
"embed"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"path"
|
||||
)
|
||||
|
||||
//go:embed all:migrations
|
||||
|
@ -28,12 +28,12 @@ func Write(temp string) error {
|
|||
continue
|
||||
}
|
||||
|
||||
b, err := Files.ReadFile(filepath.Join("migrations", f.Name()))
|
||||
b, err := Files.ReadFile(path.Join("migrations", f.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = os.WriteFile(filepath.Join(temp, f.Name()), b, 0o644)
|
||||
err = os.WriteFile(path.Join(temp, f.Name()), b, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
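The migrations hunk above swaps filepath.Join for path.Join when reading from the embedded FS: embed.FS paths are always forward-slash separated regardless of the host OS. A standalone sketch of the same pattern (this version keeps filepath.Join for the on-disk destination, whereas the repository uses path.Join for both; the directory name mirrors the //go:embed directive shown above):

```go
package example

import (
	"embed"
	"os"
	"path"
	"path/filepath"
)

//go:embed all:migrations
var files embed.FS

// writeOut copies every embedded migration file into dir.
func writeOut(dir string) error {
	entries, err := files.ReadDir("migrations")
	if err != nil {
		return err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// Embedded paths always use "/", so path.Join is the correct helper here.
		b, err := files.ReadFile(path.Join("migrations", e.Name()))
		if err != nil {
			return err
		}
		// The destination is a real OS path; filepath.Join handles OS separators.
		if err := os.WriteFile(filepath.Join(dir, e.Name()), b, 0o644); err != nil {
			return err
		}
	}
	return nil
}
```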
@ -0,0 +1,12 @@
|
|||
-- Disable the enforcement of foreign-keys constraints
|
||||
PRAGMA foreign_keys = off;
|
||||
-- Create "new_attachments" table
|
||||
CREATE TABLE `new_attachments` (`id` uuid NOT NULL, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `type` text NOT NULL DEFAULT 'attachment', `primary` bool NOT NULL DEFAULT false, `document_attachments` uuid NOT NULL, `item_attachments` uuid NOT NULL, PRIMARY KEY (`id`), CONSTRAINT `attachments_documents_attachments` FOREIGN KEY (`document_attachments`) REFERENCES `documents` (`id`) ON DELETE CASCADE, CONSTRAINT `attachments_items_attachments` FOREIGN KEY (`item_attachments`) REFERENCES `items` (`id`) ON DELETE CASCADE);
|
||||
-- Copy rows from old table "attachments" to new temporary table "new_attachments"
|
||||
INSERT INTO `new_attachments` (`id`, `created_at`, `updated_at`, `type`, `document_attachments`, `item_attachments`) SELECT `id`, `created_at`, `updated_at`, `type`, `document_attachments`, `item_attachments` FROM `attachments`;
|
||||
-- Drop "attachments" table after copying rows
|
||||
DROP TABLE `attachments`;
|
||||
-- Rename temporary table "new_attachments" to "attachments"
|
||||
ALTER TABLE `new_attachments` RENAME TO `attachments`;
|
||||
-- Enable back the enforcement of foreign-keys constraints
|
||||
PRAGMA foreign_keys = on;
|
|
@ -1,4 +1,4 @@
|
|||
h1:VjVLPBHzJ8N1Hiw+Aeitb0alnVn9UFilnajCzc+pie8=
|
||||
h1:sjJCTAqc9FG8BKBIzh5ZynYD/Ilz6vnLqM4XX83WQ4M=
|
||||
20220929052825_init.sql h1:ZlCqm1wzjDmofeAcSX3jE4h4VcdTNGpRg2eabztDy9Q=
|
||||
20221001210956_group_invitations.sql h1:YQKJFtE39wFOcRNbZQ/d+ZlHwrcfcsZlcv/pLEYdpjw=
|
||||
20221009173029_add_user_roles.sql h1:vWmzAfgEWQeGk0Vn70zfVPCcfEZth3E0JcvyKTjpYyU=
|
||||
|
@ -12,3 +12,4 @@ h1:VjVLPBHzJ8N1Hiw+Aeitb0alnVn9UFilnajCzc+pie8=
|
|||
20230227024134_add_scheduled_date.sql h1:8qO5OBZ0AzsfYEQOAQQrYIjyhSwM+v1A+/ylLSoiyoc=
|
||||
20230305065819_add_notifier_types.sql h1:r5xrgCKYQ2o9byBqYeAX1zdp94BLdaxf4vq9OmGHNl0=
|
||||
20230305071524_add_group_id_to_notifiers.sql h1:xDShqbyClcFhvJbwclOHdczgXbdffkxXNWjV61hL/t4=
|
||||
20231006213457_add_primary_attachment_flag.sql h1:J4tMSJQFa7vaj0jpnh8YKTssdyIjRyq6RXDXZIzDDu4=
|
||||
@ -47,6 +47,11 @@ func (aid AssetID) MarshalJSON() ([]byte, error) {
|
|||
}
|
||||
|
||||
func (aid *AssetID) UnmarshalJSON(d []byte) error {
|
||||
if len(d) == 0 || bytes.Equal(d, []byte(`""`)) {
|
||||
*aid = -1
|
||||
return nil
|
||||
}
|
||||
|
||||
d = bytes.Replace(d, []byte(`"`), []byte(``), -1)
|
||||
d = bytes.Replace(d, []byte(`-`), []byte(``), -1)
|
||||
|
||||
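The UnmarshalJSON hunk above accepts asset IDs sent as JSON strings, stripping the quotes and the display dash and mapping an empty string to -1 (unset). A rough round-trip sketch, assuming AssetID is exported from the internal/data/repo package and that the remainder of UnmarshalJSON parses the cleaned digits:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

func main() {
	var out struct {
		AssetID repo.AssetID `json:"assetId"`
	}

	// "000-001" has its quotes and dash stripped before parsing.
	if err := json.Unmarshal([]byte(`{"assetId": "000-001"}`), &out); err != nil {
		panic(err)
	}
	fmt.Println(int(out.AssetID)) // 1, assuming the cleaned digits are parsed as an integer

	// An empty string is treated as "unset".
	if err := json.Unmarshal([]byte(`{"assetId": ""}`), &out); err != nil {
		panic(err)
	}
	fmt.Println(int(out.AssetID)) // -1
}
```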
@ -13,8 +13,8 @@ import (
|
|||
)
|
||||
|
||||
var (
|
||||
fk = faker.NewFaker()
|
||||
tbus = eventbus.New()
|
||||
fk = faker.NewFaker()
|
||||
tbus = eventbus.New()
|
||||
|
||||
tClient *ent.Client
|
||||
tRepos *AllRepos
|
||||
|
@ -45,7 +45,7 @@ func TestMain(m *testing.M) {
|
|||
log.Fatalf("failed opening connection to sqlite: %v", err)
|
||||
}
|
||||
|
||||
go tbus.Run()
|
||||
go tbus.Run()
|
||||
|
||||
err = client.Schema.Create(context.Background())
|
||||
if err != nil {
|
||||
|
|
|
@ -233,7 +233,7 @@ func (r *GroupRepository) StatsGroup(ctx context.Context, GID uuid.UUID) (GroupS
|
|||
(SELECT COUNT(*) FROM items WHERE group_items = ? AND items.archived = false) AS total_items,
|
||||
(SELECT COUNT(*) FROM locations WHERE group_locations = ?) AS total_locations,
|
||||
(SELECT COUNT(*) FROM labels WHERE group_labels = ?) AS total_labels,
|
||||
(SELECT SUM(purchase_price) FROM items WHERE group_items = ? AND items.archived = false) AS total_item_price,
|
||||
(SELECT SUM(purchase_price*quantity) FROM items WHERE group_items = ? AND items.archived = false) AS total_item_price,
|
||||
(SELECT COUNT(*)
|
||||
FROM items
|
||||
WHERE group_items = ?
|
||||
|
|
|
@ -7,6 +7,7 @@ import (
|
|||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
)
|
||||
|
||||
// AttachmentRepo is a repository for Attachments table that links Items to Documents
|
||||
|
@ -24,12 +25,14 @@ type (
|
|||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
Type string `json:"type"`
|
||||
Document DocumentOut `json:"document"`
|
||||
Primary bool `json:"primary"`
|
||||
}
|
||||
|
||||
ItemAttachmentUpdate struct {
|
||||
ID uuid.UUID `json:"-"`
|
||||
Type string `json:"type"`
|
||||
Title string `json:"title"`
|
||||
ID uuid.UUID `json:"-"`
|
||||
Type string `json:"type"`
|
||||
Title string `json:"title"`
|
||||
Primary bool `json:"primary"`
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -39,6 +42,7 @@ func ToItemAttachment(attachment *ent.Attachment) ItemAttachment {
|
|||
CreatedAt: attachment.CreatedAt,
|
||||
UpdatedAt: attachment.UpdatedAt,
|
||||
Type: attachment.Type.String(),
|
||||
Primary: attachment.Primary,
|
||||
Document: DocumentOut{
|
||||
ID: attachment.Edges.Document.ID,
|
||||
Title: attachment.Edges.Document.Title,
|
||||
|
@ -48,11 +52,30 @@ func ToItemAttachment(attachment *ent.Attachment) ItemAttachment {
|
|||
}
|
||||
|
||||
func (r *AttachmentRepo) Create(ctx context.Context, itemId, docId uuid.UUID, typ attachment.Type) (*ent.Attachment, error) {
|
||||
return r.db.Attachment.Create().
|
||||
bldr := r.db.Attachment.Create().
|
||||
SetType(typ).
|
||||
SetDocumentID(docId).
|
||||
SetItemID(itemId).
|
||||
Save(ctx)
|
||||
SetItemID(itemId)
|
||||
|
||||
// Autoset primary to true if this is the first attachment
|
||||
// that is of type photo
|
||||
if typ == attachment.TypePhoto {
|
||||
cnt, err := r.db.Attachment.Query().
|
||||
Where(
|
||||
attachment.HasItemWith(item.ID(itemId)),
|
||||
attachment.TypeEQ(typ),
|
||||
).
|
||||
Count(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if cnt == 0 {
|
||||
bldr = bldr.SetPrimary(true)
|
||||
}
|
||||
}
|
||||
|
||||
return bldr.Save(ctx)
|
||||
}
|
||||
|
||||
func (r *AttachmentRepo) Get(ctx context.Context, id uuid.UUID) (*ent.Attachment, error) {
|
||||
|
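With the Create change above, the first photo attached to an item is promoted to primary automatically. A small usage sketch against the repository method; the AllRepos wiring and field names are assumptions based on the tests elsewhere in this diff:

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

// attachFirstPhoto links a photo document to an item; because the item has
// no other photo yet, Create flags the new attachment as primary.
func attachFirstPhoto(ctx context.Context, repos *repo.AllRepos, itemID, docID uuid.UUID) error {
	att, err := repos.Attachments.Create(ctx, itemID, docID, attachment.TypePhoto)
	if err != nil {
		return err
	}
	fmt.Printf("attachment %s primary=%v\n", att.ID, att.Primary)
	return nil
}
```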
@ -64,10 +87,33 @@ func (r *AttachmentRepo) Get(ctx context.Context, id uuid.UUID) (*ent.Attachment
|
|||
Only(ctx)
|
||||
}
|
||||
|
||||
func (r *AttachmentRepo) Update(ctx context.Context, itemId uuid.UUID, typ attachment.Type) (*ent.Attachment, error) {
|
||||
itm, err := r.db.Attachment.UpdateOneID(itemId).
|
||||
SetType(typ).
|
||||
Save(ctx)
|
||||
func (r *AttachmentRepo) Update(ctx context.Context, itemId uuid.UUID, data *ItemAttachmentUpdate) (*ent.Attachment, error) {
|
||||
// TODO: execute within Tx
|
||||
typ := attachment.Type(data.Type)
|
||||
|
||||
bldr := r.db.Attachment.UpdateOneID(itemId).
|
||||
SetType(typ)
|
||||
|
||||
// Primary only applies to photos
|
||||
if typ == attachment.TypePhoto {
|
||||
bldr = bldr.SetPrimary(data.Primary)
|
||||
} else {
|
||||
bldr = bldr.SetPrimary(false)
|
||||
}
|
||||
|
||||
itm, err := bldr.Save(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Ensure all other attachments are not primary
|
||||
err = r.db.Attachment.Update().
|
||||
Where(
|
||||
attachment.HasItemWith(item.ID(itemId)),
|
||||
attachment.IDNEQ(itm.ID),
|
||||
).
|
||||
SetPrimary(false).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
@ -110,7 +110,10 @@ func TestAttachmentRepo_Update(t *testing.T) {
|
|||
|
||||
for _, typ := range []attachment.Type{"photo", "manual", "warranty", "attachment"} {
|
||||
t.Run(string(typ), func(t *testing.T) {
|
||||
_, err := tRepos.Attachments.Update(context.Background(), entity.ID, typ)
|
||||
_, err := tRepos.Attachments.Update(context.Background(), entity.ID, &ItemAttachmentUpdate{
|
||||
Type: string(typ),
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
|
||||
updated, err := tRepos.Attachments.Get(context.Background(), entity.ID)
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/group"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
|
||||
|
@ -35,6 +36,7 @@ type (
|
|||
AssetID AssetID `json:"assetId"`
|
||||
LocationIDs []uuid.UUID `json:"locationIds"`
|
||||
LabelIDs []uuid.UUID `json:"labelIds"`
|
||||
ParentItemIDs []uuid.UUID `json:"parentIds"`
|
||||
SortBy string `json:"sortBy"`
|
||||
IncludeArchived bool `json:"includeArchived"`
|
||||
Fields []FieldQuery `json:"fields"`
|
||||
|
@ -125,6 +127,8 @@ type (
|
|||
// Edges
|
||||
Location *LocationSummary `json:"location,omitempty" extensions:"x-nullable,x-omitempty"`
|
||||
Labels []LabelSummary `json:"labels"`
|
||||
|
||||
ImageID *uuid.UUID `json:"imageId,omitempty"`
|
||||
}
|
||||
|
||||
ItemOut struct {
|
||||
|
@ -156,7 +160,6 @@ type (
|
|||
|
||||
Attachments []ItemAttachment `json:"attachments"`
|
||||
Fields []ItemField `json:"fields"`
|
||||
Children []ItemSummary `json:"children"`
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -174,6 +177,16 @@ func mapItemSummary(item *ent.Item) ItemSummary {
|
|||
labels = mapEach(item.Edges.Label, mapLabelSummary)
|
||||
}
|
||||
|
||||
var imageID *uuid.UUID
|
||||
if item.Edges.Attachments != nil {
|
||||
for _, a := range item.Edges.Attachments {
|
||||
if a.Primary && a.Edges.Document != nil {
|
||||
imageID = &a.ID
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ItemSummary{
|
||||
ID: item.ID,
|
||||
Name: item.Name,
|
||||
|
@ -191,6 +204,7 @@ func mapItemSummary(item *ent.Item) ItemSummary {
|
|||
|
||||
// Warranty
|
||||
Insured: item.Insured,
|
||||
ImageID: imageID,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -226,11 +240,6 @@ func mapItemOut(item *ent.Item) ItemOut {
|
|||
fields = mapFields(item.Edges.Fields)
|
||||
}
|
||||
|
||||
var children []ItemSummary
|
||||
if item.Edges.Children != nil {
|
||||
children = mapEach(item.Edges.Children, mapItemSummary)
|
||||
}
|
||||
|
||||
var parent *ItemSummary
|
||||
if item.Edges.Parent != nil {
|
||||
v := mapItemSummary(item.Edges.Parent)
|
||||
|
@ -264,7 +273,6 @@ func mapItemOut(item *ent.Item) ItemOut {
|
|||
Notes: item.Notes,
|
||||
Attachments: attachments,
|
||||
Fields: fields,
|
||||
Children: children,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -282,7 +290,6 @@ func (e *ItemsRepository) getOne(ctx context.Context, where ...predicate.Item) (
|
|||
WithLabel().
|
||||
WithLocation().
|
||||
WithGroup().
|
||||
WithChildren().
|
||||
WithParent().
|
||||
WithAttachments(func(aq *ent.AttachmentQuery) {
|
||||
aq.WithDocument()
|
||||
|
@ -384,6 +391,10 @@ func (e *ItemsRepository) QueryByGroup(ctx context.Context, gid uuid.UUID, q Ite
|
|||
|
||||
andPredicates = append(andPredicates, item.Or(fieldPredicates...))
|
||||
}
|
||||
|
||||
if len(q.ParentItemIDs) > 0 {
|
||||
andPredicates = append(andPredicates, item.HasParentWith(item.IDIn(q.ParentItemIDs...)))
|
||||
}
|
||||
}
|
||||
|
||||
if len(andPredicates) > 0 {
|
||||
|
@ -407,7 +418,13 @@ func (e *ItemsRepository) QueryByGroup(ctx context.Context, gid uuid.UUID, q Ite
|
|||
|
||||
qb = qb.
|
||||
WithLabel().
|
||||
WithLocation()
|
||||
WithLocation().
|
||||
WithAttachments(func(aq *ent.AttachmentQuery) {
|
||||
aq.Where(
|
||||
attachment.Primary(true),
|
||||
).
|
||||
WithDocument()
|
||||
})
|
||||
|
||||
if q.Page != -1 || q.PageSize != -1 {
|
||||
qb = qb.
|
||||
|
@ -533,13 +550,13 @@ func (e *ItemsRepository) Create(ctx context.Context, gid uuid.UUID, data ItemCr
|
|||
}
|
||||
|
||||
func (e *ItemsRepository) Delete(ctx context.Context, id uuid.UUID) error {
|
||||
err := e.db.Item.DeleteOneID(id).Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err := e.db.Item.DeleteOneID(id).Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
e.publishMutationEvent(id)
|
||||
return nil
|
||||
e.publishMutationEvent(id)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *ItemsRepository) DeleteByGroup(ctx context.Context, gid, id uuid.UUID) error {
|
||||
|
@ -549,12 +566,11 @@ func (e *ItemsRepository) DeleteByGroup(ctx context.Context, gid, id uuid.UUID)
|
|||
item.ID(id),
|
||||
item.HasGroupWith(group.ID(gid)),
|
||||
).Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
e.publishMutationEvent(gid)
|
||||
e.publishMutationEvent(gid)
|
||||
return err
|
||||
}
|
||||
|
||||
|
@ -670,7 +686,7 @@ func (e *ItemsRepository) UpdateByGroup(ctx context.Context, GID uuid.UUID, data
|
|||
}
|
||||
}
|
||||
|
||||
e.publishMutationEvent(GID)
|
||||
e.publishMutationEvent(GID)
|
||||
return e.GetOne(ctx, data.ID)
|
||||
}
|
||||
|
||||
|
@ -709,7 +725,7 @@ func (e *ItemsRepository) Patch(ctx context.Context, GID, ID uuid.UUID, data Ite
|
|||
q.SetQuantity(*data.Quantity)
|
||||
}
|
||||
|
||||
e.publishMutationEvent(GID)
|
||||
e.publishMutationEvent(GID)
|
||||
return q.Exec(ctx)
|
||||
}
|
||||
|
||||
|
@ -822,3 +838,52 @@ func (e *ItemsRepository) ZeroOutTimeFields(ctx context.Context, GID uuid.UUID)
|
|||
|
||||
return updated, nil
|
||||
}
|
||||
|
||||
func (e *ItemsRepository) SetPrimaryPhotos(ctx context.Context, GID uuid.UUID) (int, error) {
|
||||
// All items where there is no primary photo
|
||||
itemIDs, err := e.db.Item.Query().
|
||||
Where(
|
||||
item.HasGroupWith(group.ID(GID)),
|
||||
item.HasAttachmentsWith(
|
||||
attachment.TypeEQ(attachment.TypePhoto),
|
||||
attachment.Not(
|
||||
attachment.And(
|
||||
attachment.Primary(true),
|
||||
attachment.TypeEQ(attachment.TypePhoto),
|
||||
),
|
||||
),
|
||||
),
|
||||
).
|
||||
IDs(ctx)
|
||||
if err != nil {
|
||||
return -1, err
|
||||
}
|
||||
|
||||
updated := 0
|
||||
for _, id := range itemIDs {
|
||||
// Find the first photo attachment
|
||||
a, err := e.db.Attachment.Query().
|
||||
Where(
|
||||
attachment.HasItemWith(item.ID(id)),
|
||||
attachment.TypeEQ(attachment.TypePhoto),
|
||||
attachment.Primary(false),
|
||||
).
|
||||
First(ctx)
|
||||
if err != nil {
|
||||
return updated, err
|
||||
}
|
||||
|
||||
// Set it as primary
|
||||
_, err = e.db.Attachment.UpdateOne(a).
|
||||
SetPrimary(true).
|
||||
Save(ctx)
|
||||
|
||||
if err != nil {
|
||||
return updated, err
|
||||
}
|
||||
|
||||
updated++
|
||||
}
|
||||
|
||||
return updated, nil
|
||||
}
|
||||
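SetPrimaryPhotos above is a one-off backfill: for every item in a group that has photos but no primary photo, it promotes the first photo it finds and reports how many items were touched (the new /v1/actions/set-primary-photos endpoint later in this diff is its HTTP entry point). A minimal call sketch, assuming AllRepos exposes the items repository as Items:

```go
package example

import (
	"context"
	"log"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

// backfillPrimaryPhotos promotes the first photo of each item in the group
// that does not yet have a primary photo.
func backfillPrimaryPhotos(ctx context.Context, repos *repo.AllRepos, groupID uuid.UUID) {
	updated, err := repos.Items.SetPrimaryPhotos(ctx, groupID)
	if err != nil {
		log.Printf("set primary photos: %v", err)
		return
	}
	log.Printf("updated %d items", updated)
}
```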
@ -8,7 +8,6 @@ import (
|
|||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/group"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
@ -42,7 +41,6 @@ type (
|
|||
|
||||
LabelOut struct {
|
||||
LabelSummary
|
||||
Items []ItemSummary `json:"items"`
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -64,7 +62,6 @@ var (
|
|||
func mapLabelOut(label *ent.Label) LabelOut {
|
||||
return LabelOut{
|
||||
LabelSummary: mapLabelSummary(label),
|
||||
Items: mapEach(label.Edges.Items, mapItemSummary),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -78,9 +75,6 @@ func (r *LabelRepository) getOne(ctx context.Context, where ...predicate.Label)
|
|||
return mapLabelOutErr(r.db.Label.Query().
|
||||
Where(where...).
|
||||
WithGroup().
|
||||
WithItems(func(iq *ent.ItemQuery) {
|
||||
iq.Where(item.Archived(false))
|
||||
}).
|
||||
Only(ctx),
|
||||
)
|
||||
}
|
||||
|
@ -142,7 +136,7 @@ func (r *LabelRepository) UpdateByGroup(ctx context.Context, GID uuid.UUID, data
|
|||
}
|
||||
|
||||
// delete removes the label from the database. This should only be used when
|
||||
// the label's ownership is already confirmed/validated.
|
||||
// the label's ownership is already confirmed/validated.
|
||||
func (r *LabelRepository) delete(ctx context.Context, id uuid.UUID) error {
|
||||
return r.db.Label.DeleteOneID(id).Exec(ctx)
|
||||
}
|
||||
|
|
|
@ -9,7 +9,6 @@ import (
|
|||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/group"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
@ -49,7 +48,6 @@ type (
|
|||
LocationOut struct {
|
||||
Parent *LocationSummary `json:"parent,omitempty"`
|
||||
LocationSummary
|
||||
Items []ItemSummary `json:"items"`
|
||||
Children []LocationSummary `json:"children"`
|
||||
}
|
||||
)
|
||||
|
@ -87,8 +85,6 @@ func mapLocationOut(location *ent.Location) LocationOut {
|
|||
CreatedAt: location.CreatedAt,
|
||||
UpdatedAt: location.UpdatedAt,
|
||||
},
|
||||
Items: mapEach(location.Edges.Items, mapItemSummary),
|
||||
Children: children,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -164,11 +160,6 @@ func (r *LocationRepository) getOne(ctx context.Context, where ...predicate.Loca
|
|||
return mapLocationOutErr(r.db.Location.Query().
|
||||
Where(where...).
|
||||
WithGroup().
|
||||
WithItems(func(iq *ent.ItemQuery) {
|
||||
iq.Where(item.Archived(false)).
|
||||
Order(ent.Asc(item.FieldName)).
|
||||
WithLabel()
|
||||
}).
|
||||
WithParent().
|
||||
WithChildren().
|
||||
Only(ctx))
|
||||
|
|
|
@ -15,6 +15,7 @@ const (
|
|||
)
|
||||
|
||||
type Config struct {
|
||||
conf.Version
|
||||
Mode string `yaml:"mode" conf:"default:development"` // development or production
|
||||
Web WebConfig `yaml:"web"`
|
||||
Storage Storage `yaml:"storage"`
|
||||
|
@ -36,17 +37,25 @@ type DebugConf struct {
|
|||
}
|
||||
|
||||
type WebConfig struct {
|
||||
Port string `yaml:"port" conf:"default:7745"`
|
||||
Port string `yaml:"port" conf:"default:7745"`
|
||||
Host string `yaml:"host"`
|
||||
MaxUploadSize int64 `yaml:"max_file_upload" conf:"default:10"`
|
||||
ReadTimeout int `yaml:"read_timeout" conf:"default:10"`
|
||||
WriteTimeout int `yaml:"write_timeout" conf:"default:10"`
|
||||
IdleTimeout int `yaml:"idle_timeout" conf:"default:30"`
|
||||
}
|
||||
|
||||
// New parses the CLI/Config file and returns a Config struct. If the file argument is an empty string, the
|
||||
// file is not read. If the file is not empty, the file is read and the Config struct is returned.
|
||||
func New() (*Config, error) {
|
||||
func New(buildstr string, description string) (*Config, error) {
|
||||
var cfg Config
|
||||
const prefix = "HBOX"
|
||||
|
||||
cfg.Version = conf.Version{
|
||||
Build: buildstr,
|
||||
Desc: description,
|
||||
}
|
||||
|
||||
help, err := conf.Parse(prefix, &cfg)
|
||||
if err != nil {
|
||||
if errors.Is(err, conf.ErrHelpWanted) {
|
||||
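The WebConfig hunk above adds read, write, and idle timeouts (surfaced as HBOX_WEB_READ_TIMEOUT, HBOX_WEB_WRITE_TIMEOUT, and HBOX_WEB_IDLE_TIMEOUT in the docs below), and New now accepts the build string and description for conf.Version. A sketch of how the timeout values would typically be applied to an http.Server; the config import path and the second-based units are assumptions, since the wiring is not part of this diff:

```go
package example

import (
	"net/http"
	"time"

	"github.com/hay-kot/homebox/backend/internal/sys/config"
)

// newServer builds an http.Server from the timeout fields added to WebConfig.
// The import path and second-based units above are assumptions.
func newServer(cfg *config.Config, handler http.Handler) *http.Server {
	return &http.Server{
		Addr:         cfg.Web.Host + ":" + cfg.Web.Port,
		Handler:      handler,
		ReadTimeout:  time.Duration(cfg.Web.ReadTimeout) * time.Second,
		WriteTimeout: time.Duration(cfg.Web.WriteTimeout) * time.Second,
		IdleTimeout:  time.Duration(cfg.Web.IdleTimeout) * time.Second,
	}
}
```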
@ -13,6 +13,7 @@ func init() {
|
|||
|
||||
err := validate.RegisterValidation("shoutrrr", func(fl validator.FieldLevel) bool {
|
||||
prefixes := [...]string{
|
||||
"bark://",
|
||||
"discord://",
|
||||
"smtp://",
|
||||
"gotify://",
|
||||
|
@ -21,6 +22,7 @@ func init() {
|
|||
"join://",
|
||||
"mattermost://",
|
||||
"matrix://",
|
||||
"ntfy://",
|
||||
"opsgenie://",
|
||||
"pushbullet://",
|
||||
"pushover://",
|
||||
|
@ -30,6 +32,7 @@ func init() {
|
|||
"telegram://",
|
||||
"zulip://",
|
||||
"generic://",
|
||||
"generic+",
|
||||
}
|
||||
|
||||
str := fl.Field().String()
|
||||
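The validator hunk above registers a shoutrrr rule and extends its allow-list with the ntfy:// and generic:// schemes; the actual comparison against the field value falls outside the hunk. A minimal sketch of the prefix check presumably performed on str (the loop is an assumption, not code from the repository):

```go
package example

import "strings"

// hasNotifierScheme reports whether url starts with one of the allowed
// shoutrrr URL prefixes, mirroring what the registered validator likely does.
func hasNotifierScheme(url string, prefixes []string) bool {
	for _, p := range prefixes {
		if strings.HasPrefix(url, p) {
			return true
		}
	}
	return false
}
```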
@ -60,6 +60,31 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
|
@ -1871,6 +1896,9 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -1882,6 +1910,9 @@
|
|||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -1981,6 +2012,9 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
@ -1996,9 +2030,13 @@
|
|||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
|
@ -2014,9 +2052,13 @@
|
|||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
|
@ -2088,6 +2130,9 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
@ -2099,9 +2144,13 @@
|
|||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
|
@ -2125,7 +2174,8 @@
|
|||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
|
@ -2210,7 +2260,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2247,12 +2296,6 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -2314,12 +2357,6 @@
|
|||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
|
@ -2396,7 +2433,6 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2413,7 +2449,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2425,7 +2460,6 @@
|
|||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2439,7 +2473,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
@ -2448,7 +2481,6 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
|
@ -2462,7 +2494,6 @@
|
|||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,11 +2,11 @@
|
|||
|
||||
## Quick Start
|
||||
|
||||
Using the CSV import is the recommended way for adding items to the database. It is always going to be the fastest way to import any large amount of items and provides the most flexibility when it comes to adding items.
|
||||
Using the CSV import is the recommended way for adding items to the database. It is always going to be the fastest way to import any large number of items and provides the most flexibility when it comes to adding items.
|
||||
|
||||
**Current Limitations**
|
||||
|
||||
- Imports only supports importing items, locations, and labels
|
||||
- Imports only support importing items, locations, and labels
|
||||
- Imports and Exports do not support attachments. Attachments must be uploaded after import
|
||||
- CSV Exports do not support nested path exports (e.g. `Home / Office / Desk`) and will only export the Item's direct parent (though imports _do_ support nested paths)
|
||||
- Cannot specify item-to-item relationships (e.g. `Item A` is a child of `Item B`)
|
||||
|
@ -16,13 +16,13 @@ Using the CSV import is the recommended way for adding items to the database. It
|
|||
|
||||
## CSV Reference
|
||||
|
||||
Below are the supported columns. They are case sensitive, can be in any ordered or can be omitted unless otherwise specified.
|
||||
Below are the supported columns. They are case-sensitive, can be in any order, or can be omitted unless otherwise specified.
|
||||
|
||||
### Special Syntax Columns
|
||||
|
||||
`HB.import_ref`
|
||||
|
||||
: Import Refs are unique strings that can be used to deduplicate imports. Before an item is imported, we check the database for a matching ref. If the ref exists, we skip creation of that item.
|
||||
: Import Refs are unique strings that can be used to deduplicate imports. Before an item is imported, we check the database for a matching ref. If the ref exists, we skip the creation of that item.
|
||||
|
||||
* String Type
|
||||
* Max 100 Characters
|
||||
|
@ -52,7 +52,7 @@ Below are the supported columns. They are case sensitive, can be in any ordered
|
|||
### Standard Columns
|
||||
|
||||
| Column | Type | Description |
|
||||
| -------------------- | ------------- | --------------------------------------------- |
|
||||
|----------------------|---------------|-----------------------------------------------|
|
||||
| HB.quantity | Integer | The quantity of items to create |
|
||||
| HB.name | String | Name of the item |
|
||||
| HB.asset_id | AssetID | Asset ID for the item |
|
||||
|
@ -76,7 +76,7 @@ Below are the supported columns. They are case sensitive, can be in any ordered
|
|||
**Type Key**
|
||||
|
||||
| Type | Format |
|
||||
| ------- | --------------------------------------------------- |
|
||||
|---------|-----------------------------------------------------|
|
||||
| String | Max 255 Characters unless otherwise specified |
|
||||
| Date | YYYY-MM-DD |
|
||||
| Boolean | true or false, yes or no, 1 or 0 - case insensitive |
|
||||
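For readers generating import files programmatically, here is a small, hypothetical Go helper that writes a CSV using column names taken from the reference above (HB.import_ref, HB.name, HB.quantity, HB.asset_id); the row values and file name are illustrative only, and the other documented columns can be added the same way:

```go
package main

import (
	"encoding/csv"
	"log"
	"os"
)

func main() {
	f, err := os.Create("homebox-import.csv")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	w := csv.NewWriter(f)
	defer w.Flush()

	// Column headers are case-sensitive; HB.import_ref lets a re-run skip
	// rows that were already imported.
	records := [][]string{
		{"HB.import_ref", "HB.name", "HB.quantity", "HB.asset_id"},
		{"kitchen-mixer-01", "Stand Mixer", "1", "000-001"},
	}
	if err := w.WriteAll(records); err != nil {
		log.Fatal(err)
	}
}
```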
@ -15,19 +15,19 @@
|
|||
|
||||
|
||||
|
||||
Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project I've tried to keep the following principles in mind:
|
||||
Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, I've tried to keep the following principles in mind:
|
||||
|
||||
- _Simple_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. Use either a single docker container, or deploy yourself by compiling the binary for your platform of choice.
|
||||
- _Blazingly Fast_ - Homebox is written in Go which makes it extremely fast and requires minimal resources to deploy. In general idle memory usage is less than 50MB for the whole container.
|
||||
- _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general idle memory usage is less than 50MB for the whole container.
|
||||
- _Portable_ - Homebox is designed to be portable and run anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and back up.
|
||||
|
||||
## Project Status
|
||||
|
||||
Homebox is currently in early-active development and is currently in **beta** stage. This means that the project may still be unstable and clunky. Overall we are striving to not introduce any breaking changes and have checks in place to ensure migrations and upgrades are smooth. However, we do not guarantee that there will be no breaking changes. We will try to keep the documentation up to date as we make changes.
|
||||
Homebox is currently in early active development and is currently in **beta** stage. This means that the project may still be unstable and clunky. Overall, we are striving to not introduce any breaking changes and have checks in place to ensure migrations and upgrades are smooth. However, we do not guarantee that there will be no breaking changes. We will try to keep the documentation up to date as we make changes.
|
||||
|
||||
## Features
|
||||
|
||||
- Create and Manage _Items_ by provided a name and description - That's it! Homebox requires only a few details to be provided to create an item, after that you can specify as much detail as you want, or hide away some of the things you won't ever need.
|
||||
- Create and Manage _Items_ by providing a name and a description - That's it! Homebox requires only a few details to be provided to create an item, after that you can specify as much detail as you want, or hide away some of the things you won't ever need.
|
||||
- Optional Details for Items include
|
||||
- Warranty Information
|
||||
- Sold To Information
|
||||
|
@ -40,17 +40,17 @@ Homebox is currently in early-active development and is currently in **beta** st
|
|||
- Bill of Materials Export
|
||||
- QR Code Label Generator
|
||||
- Organize _Items_ by creating _Labels_ and _Locations_ and assigning them to items.
|
||||
- Multi-Tenant Support - All users are placed inside of a group and can only see items that are apart of their group. Invite family members to your group, or share an instance among friends!
|
||||
- Multi-Tenant Support - All users are placed in a group and can only see items in their group. Invite family members to your group, or share an instance among friends!
|
||||
|
||||
|
||||
## Why Not Use Something Else?
|
||||
|
||||
There are a lot of great inventory management systems out there, but none of them _really_ fit my needs as a home user. Snipe-IT is a fantastic product that has so many robust features and management options that it's easy to become overwhelmed and confused. I wanted something that was simple and easy to use that didn't require a lot of cognitive overhead to manage. I primarily built this to organize my IOT devices and save my warranty and documentation information in a central, searchable location.
|
||||
There are a lot of great inventory management systems out there, but none of them _really_ fit my needs as a home user. Snipe-IT is a fantastic product that has so many robust features and management options, which makes it easy to become overwhelmed and confused. I wanted something that was simple and easy to use that didn't require a lot of cognitive overhead to manage. I primarily built this to organize my IOT devices and save my warranty and documentation information in a central, searchable location.
|
||||
|
||||
### Spreadsheet
|
||||
|
||||
That's a fair point. If your needs can be fulfilled by a Spreadsheet, I'd suggest using that instead. I've found spreadsheets get pretty unwieldy when you have a lot of data and it's hard to keep track of what's where. I also wanted to be able to search and filter my data in a more robust way than a spreadsheet can provide. I also wanted to leave to door open for more advanced features in the future like maintenance logs, moving label generators, and more.
|
||||
That's a fair point. If your needs can be fulfilled by a Spreadsheet, I'd suggest using that instead. I've found spreadsheets get pretty unwieldy when you have a lot of data, and it's hard to keep track of what's where. I also wanted to be able to search and filter my data in a more robust way than a spreadsheet can provide. I also wanted to leave the door open for more advanced features in the future like maintenance logs, moving label generators, and more.
|
||||
|
||||
### Snipe-It?
|
||||
|
||||
Snipe-It is the gold standard for IT management. If your use-case is to manage consumables and IT physical infrastructure I highly suggest you look at Snipe-It over Homebox, it's just more purpose built for that use case. Homebox is, in contrast, purpose built for the home user, which means that we try to focus on keeping things simple and easy to use. Lowering the friction for creating items and managing them is a key goal of Homebox which means you lose out on some of the more advanced features. In most cases this is a good trade-off.
|
||||
Snipe-It is the gold standard for IT management. If your use-case is to manage consumables and IT physical infrastructure, I highly suggest you look at Snipe-It over Homebox, it's just more purpose built for that use case. Homebox is, in contrast, purpose built for the home user, which means that we try to focus on keeping things simple and easy to use. Lowering the friction for creating items and managing them is a key goal of Homebox which means you lose out on some of the more advanced features. In most cases, this is a good trade-off.
|
|
@ -7,7 +7,7 @@ Great for testing out the application, but not recommended for stable use. Check
|
|||
For each image there are two tags: the regular tag and $TAG-rootless, which uses a non-root image.
|
||||
|
||||
```sh
|
||||
# If using the rootless image, ensure data
|
||||
# If using the rootless image, ensure data
|
||||
# folder has correct permissions
|
||||
$ mkdir -p /path/to/data/folder
|
||||
$ chown 65532:65532 -R /path/to/data/folder
|
||||
|
@ -21,7 +21,7 @@ $ docker run -d \
|
|||
--volume /path/to/data/folder/:/data \
|
||||
ghcr.io/hay-kot/homebox:latest
|
||||
# ghcr.io/hay-kot/homebox:latest-rootless
|
||||
|
||||
|
||||
```
|
||||
|
||||
## Docker-Compose
|
||||
|
@ -62,8 +62,11 @@ volumes:
|
|||
| HBOX_OPTIONS_ALLOW_REGISTRATION | true | allow users to register themselves |
|
||||
| HBOX_OPTIONS_AUTO_INCREMENT_ASSET_ID | true | auto increments the asset_id field for new items |
|
||||
| HBOX_WEB_MAX_UPLOAD_SIZE | 10 | maximum file upload size supported in MB |
|
||||
| HBOX_WEB_READ_TIMEOUT | 10 | Read timeout of HTTP sever |
|
||||
| HBOX_WEB_WRITE_TIMEOUT | 10 | Write timeout of HTTP server |
|
||||
| HBOX_WEB_IDLE_TIMEOUT | 30 | Idle timeout of HTTP server |
|
||||
| HBOX_STORAGE_DATA | /data/ | path to the data directory, do not change this if you're using docker |
|
||||
| HBOX_STORAGE_SQLITE_URL | /data/homebox.db?_fk=1 | sqlite database url, in you're using docker do not change this |
|
||||
| HBOX_STORAGE_SQLITE_URL | /data/homebox.db?_fk=1 | sqlite database url, if you're using docker do not change this |
|
||||
| HBOX_LOG_LEVEL | info | log level to use, can be one of: trace, debug, info, warn, error, critical |
|
||||
| HBOX_LOG_FORMAT | text | log format to use, can be one of: text, json |
|
||||
| HBOX_MAILER_HOST | | email host to use, if not set no email provider will be used |
|
||||
|
|
|
@ -12,7 +12,7 @@ Custom fields are a great way to add any extra information to your item. The fol
|
|||
Custom fields are appended to the main details section of your item.
|
||||
|
||||
!!! tip
|
||||
Homebox Custom Fields also have special support for URLs. Provide a URL (`https://google.com`) and it will be automatically converted to a clickable link in the UI. Optionally, you can also use markdown syntax to add a custom text to the button. `[Google](https://google.com)`
|
||||
Homebox Custom Fields also have special support for URLs. Provide a URL (`https://google.com`) and it will be automatically converted to a clickable link in the UI. Optionally, you can also use Markdown syntax to add custom text to the button. `[Google](https://google.com)`
|
||||
|
||||
## Managing Asset IDs
|
||||
|
||||
|
@ -20,26 +20,26 @@ Homebox provides the option to auto-set asset IDs, this is the default behavior.
|
|||
|
||||
Example ID: `000-001`
|
||||
|
||||
Asset IDs are partially managed by Homebox, but have a flexible implementation to allow for unique use cases. ID's are non-unique at the database level so there is nothing stopping a user from manually setting duplicate IDs for various items. There are two recommended approaches to manage Asset IDs
|
||||
Asset IDs are partially managed by Homebox, but have a flexible implementation to allow for unique use cases. IDs are non-unique at the database level, so there is nothing stopping a user from manually setting duplicate IDs for various items. There are two recommended approaches to manage Asset IDs:
|
||||
|
||||
### 1. Auto Incrementing IDs
|
||||
|
||||
This is the default behavior and likely to one to experience the most consistent behavior. Whenever creating or importing an item, that items receives the next available ID. This is the most consistent approach and is recommended for most users.
|
||||
This is the default behavior and provides the most consistent results. Whenever creating or importing an item, that item receives the next available ID. This is recommended for most users.
|
||||
|
||||
### 2. Auto Incrementing ID's with Reset
|
||||
### 2. Auto Incrementing IDs with Reset
|
||||
|
||||
In some cases you may want to skip some items such as consumables, or items that are loosely tracked. In this case, we recommend that you leave auto-incrementing ID's enabled _however_ when you create a new item that you want to skip, you can go to that item and reset the ID to 0. This will remove it from the auto-incrementing sequence and the next item will receive the next available ID.
|
||||
In some cases, you may want to skip some items such as consumables, or items that are loosely tracked. In this case, we recommend that you leave auto-incrementing IDs enabled _however_ when you create a new item that you want to skip, you can go to that item and reset the ID to 0. This will remove it from the auto-incrementing sequence, and the next item will receive the next available ID.
|
||||
|
||||
!!! tip
|
||||
If you're migrating from an older version there is a action on the users profile page to assign IDs to all items. This will assign the next available ID to all items in the order of creation. You should _only_ do this once during the migration process. You should be especially cautious of this action if you're using the reset feature described in option number 2
|
||||
If you're migrating from an older version, there is an action on the user's profile page to assign IDs to all items. This will assign the next available ID to all items in order of their creation. You should __only do this once__ during the migration process. You should be especially cautious with this if you're using the reset feature described in [option number 2](#2-auto-incrementing-ids-with-reset)
|
||||
|
||||
## QR Codes
|
||||
|
||||
:octicons-tag-24: 0.7.0
|
||||
|
||||
Homebox has a built-in QR code generator that can be used to generate QR codes for your items. This is useful for tracking items with a mobile device. You can generate a QR code for any item by clicking the QR code icon in the top right of the item details page. The same can be done for the Labels and Locations page. Currently support is limited to generating one off QR Codes.
|
||||
Homebox has a built-in QR code generator that can be used to generate QR codes for your items. This is useful for tracking items with a mobile device. You can generate a QR code for any item by clicking the QR code icon in the top right of the item details page. The same can be done for the Labels and Locations page. Currently, support is limited to generating one-off QR Codes.
|
||||
|
||||
However, the API endpoint is available for generating QR codes on the fly for any item (or any other data) if you provide a valid API key in the query parameters. An example url would look like `/api/v1/qrcode?data=https://homebox.fly.dev/item/{uuid}`. Currently the easiest way to get an API token is to use one from an existing URL of the QR Code in the API key, but this will be improved in the future.
|
||||
However, the API endpoint is available for generating QR codes on the fly for any item (or any other data) if you provide a valid API key in the query parameters. An example url would look like `/api/v1/qrcode?data=https://homebox.fly.dev/item/{uuid}`. Currently, the easiest way to get an API token is to use one from an existing URL of the QR Code in the API key, but this will be improved in the future.
|
||||
|
||||
:octicons-tag-24: v0.8.0
|
||||
|
||||
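Building on the QR endpoint described above, a hedged sketch of fetching a one-off code from Go; only the data query parameter is documented here, so the access_token parameter name and the host are assumptions:

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
)

func main() {
	q := url.Values{}
	q.Set("data", "https://homebox.fly.dev/item/{uuid}") // payload encoded into the QR code
	q.Set("access_token", os.Getenv("HOMEBOX_TOKEN"))    // parameter name is an assumption

	resp, err := http.Get("https://homebox.fly.dev/api/v1/qrcode?" + q.Encode())
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	png, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("got %d bytes of QR image data\n", len(png))
}
```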
@ -1 +1 @@
|
|||
mkdocs-material==9.1.21
|
||||
mkdocs-material==9.4.14
|
|
@ -1,94 +0,0 @@
|
|||
<template>
|
||||
<DoughnutChart
|
||||
:chart-options="chartOptions"
|
||||
:chart-data="chartData"
|
||||
:chart-id="chartId"
|
||||
:dataset-id-key="datasetIdKey"
|
||||
:css-classes="cssClasses"
|
||||
:styles="styles"
|
||||
:width="width"
|
||||
:height="height"
|
||||
/>
|
||||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
import { Doughnut as DoughnutChart } from "vue-chartjs";
|
||||
import { Chart as ChartJS, Title, Tooltip, Legend, CategoryScale, LinearScale, ArcElement } from "chart.js";
|
||||
import { TChartData } from "vue-chartjs/dist/types";
|
||||
|
||||
ChartJS.register(Title, Tooltip, Legend, CategoryScale, LinearScale, ArcElement);
|
||||
|
||||
export default defineComponent({
|
||||
name: "BarChart",
|
||||
components: {
|
||||
DoughnutChart,
|
||||
},
|
||||
props: {
|
||||
chartId: {
|
||||
type: String,
|
||||
default: "bar-chart",
|
||||
},
|
||||
datasetIdKey: {
|
||||
type: String,
|
||||
default: "label",
|
||||
},
|
||||
width: {
|
||||
type: Number,
|
||||
default: 400,
|
||||
},
|
||||
height: {
|
||||
type: Number,
|
||||
default: 400,
|
||||
},
|
||||
cssClasses: {
|
||||
default: "",
|
||||
type: String,
|
||||
},
|
||||
styles: {
|
||||
type: Object,
|
||||
default: () => {
|
||||
return {};
|
||||
},
|
||||
},
|
||||
chartData: {
|
||||
type: Object as () => TChartData<"doughnut", number[], unknown>,
|
||||
default: () => {
|
||||
return {
|
||||
labels: ["Red", "Blue", "Yellow"],
|
||||
datasets: [
|
||||
{
|
||||
label: "My First Dataset",
|
||||
data: [300, 50, 100],
|
||||
backgroundColor: ["rgb(255, 99, 132)", "rgb(54, 162, 235)", "rgb(255, 205, 86)"],
|
||||
hoverOffset: 4,
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
chartOptions: {
|
||||
responsive: false,
|
||||
// Legend on the left
|
||||
plugins: {
|
||||
legend: {
|
||||
position: "bottom",
|
||||
},
|
||||
// Display percentage
|
||||
// tooltip: {
|
||||
// callbacks: {
|
||||
// label: context => {
|
||||
// const label = context.dataset?.label || "";
|
||||
// const value = context.parsed.y;
|
||||
// return `${label}: ${value}%`;
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
</script>
|
|
@ -1,113 +0,0 @@
|
|||
<template>
|
||||
<div ref="el" class="min-h-full flex flex-col">
|
||||
{{ styles }}
|
||||
<LineChart :chart-options="options" :chart-data="chartData" :styles="styles" />
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
import { Line as LineChart } from "vue-chartjs";
|
||||
import {
|
||||
Chart as ChartJS,
|
||||
PointElement,
|
||||
Title,
|
||||
Tooltip,
|
||||
Legend,
|
||||
CategoryScale,
|
||||
LinearScale,
|
||||
LineElement,
|
||||
} from "chart.js";
|
||||
import { TChartData } from "vue-chartjs/dist/types";
|
||||
|
||||
ChartJS.register(Title, Tooltip, Legend, CategoryScale, LinearScale, PointElement, LineElement);
|
||||
|
||||
export default defineComponent({
|
||||
name: "BarChart",
|
||||
components: {
|
||||
LineChart,
|
||||
},
|
||||
props: {
|
||||
chartId: {
|
||||
type: String,
|
||||
default: "bar-chart",
|
||||
},
|
||||
datasetIdKey: {
|
||||
type: String,
|
||||
default: "label",
|
||||
},
|
||||
cssClasses: {
|
||||
default: "",
|
||||
type: String,
|
||||
},
|
||||
chartData: {
|
||||
type: Object as () => TChartData<"line", number[], unknown>,
|
||||
default: () => {
|
||||
return {
|
||||
labels: ["January", "February", "March"],
|
||||
datasets: [{ data: [40, 20, 12] }],
|
||||
};
|
||||
},
|
||||
},
|
||||
},
|
||||
setup() {
|
||||
const el = ref<HTMLElement | null>(null);
|
||||
|
||||
const calcHeight = ref(0);
|
||||
const calcWidth = ref(0);
|
||||
|
||||
function resize() {
|
||||
calcHeight.value = el.value?.offsetHeight || 0;
|
||||
calcWidth.value = el.value?.offsetWidth || 0;
|
||||
}
|
||||
|
||||
onMounted(() => {
|
||||
resize();
|
||||
window.addEventListener("resize", resize);
|
||||
});
|
||||
|
||||
onUnmounted(() => {
|
||||
window.removeEventListener("resize", resize);
|
||||
});
|
||||
|
||||
const styles = computed(() => {
|
||||
return {
|
||||
height: `${calcHeight.value}px`,
|
||||
width: `${calcWidth.value}px`,
|
||||
position: "relative",
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
el,
|
||||
parentHeight: calcHeight,
|
||||
styles,
|
||||
};
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
scales: {
|
||||
x: {
|
||||
display: false,
|
||||
},
|
||||
y: {
|
||||
display: true,
|
||||
},
|
||||
},
|
||||
elements: {
|
||||
line: {
|
||||
borderWidth: 5,
|
||||
},
|
||||
point: {
|
||||
radius: 4,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
</script>
|
||||
|
||||
<style></style>
|
|
@ -10,6 +10,14 @@
        class="w-full input input-bordered"
        @change="search = $event.target.value"
      />
      <button
        v-if="!!value"
        type="button"
        class="absolute inset-y-0 right-6 flex items-center rounded-r-md px-2 focus:outline-none"
        @click="clear"
      >
        <Icon name="mdi-close" class="w-5 h-5" />
      </button>
      <ComboboxButton class="absolute inset-y-0 right-0 flex items-center rounded-r-md px-2 focus:outline-none">
        <Icon name="mdi-chevron-down" class="w-5 h-5" />
      </ComboboxButton>

@ -86,6 +94,10 @@
    multiple: false,
  });

  function clear() {
    emit("update:modelValue", null);
  }

  const search = ref("");
  const value = useVModel(props, "modelValue", emit);
@ -1,21 +1,21 @@
<template>
  <NuxtLink class="group card rounded-md" :to="`/item/${item.id}`">
    <div class="rounded-t flex flex-col justify-center bg-neutral text-neutral-content p-5">
      <h2 class="text-lg mb-1 last:mb-0 font-bold two-line">{{ item.name }}</h2>
      <div>
        <NuxtLink v-if="item.location" class="text-sm hover:link" :to="`/location/${item.location.id}`">
  <NuxtLink class="group card rounded-md border border-gray-300" :to="`/item/${item.id}`">
    <div class="relative h-[200px]">
      <img v-if="imageUrl" class="h-[200px] w-full object-cover rounded-t shadow-sm border-gray-300" :src="imageUrl" />
      <div class="absolute bottom-1 left-1">
        <NuxtLink
          v-if="item.location"
          class="text-sm hover:link badge shadow-md rounded-md"
          :to="`/location/${item.location.id}`"
        >
          {{ item.location.name }}
        </NuxtLink>
        <span class="flex-1"></span>
      </div>
    </div>
    <div class="rounded-b p-4 pt-2 flex-grow col-span-4 flex flex-col gap-y-2 bg-base-100">
    <div class="rounded-b p-4 pt-2 flex-grow col-span-4 flex flex-col gap-y-1 bg-base-100">
      <h2 class="text-lg font-bold two-line">{{ item.name }}</h2>
      <div class="divider my-0"></div>
      <div class="flex justify-between gap-2">
        <div class="mr-auto tooltip tooltip-tip" data-tip="Purchase Price">
          <span v-if="item.purchasePrice != '0'" class="badge badge-sm badge-ghost h-5">
            <Currency :amount="item.purchasePrice" />
          </span>
        </div>
        <div v-if="item.insured" class="tooltip z-10" data-tip="Insured">
          <Icon class="h-5 w-5 text-primary" name="mdi-shield-check" />
        </div>

@ -26,7 +26,6 @@
      </div>
    </div>
    <Markdown class="mb-2 text-clip three-line" :source="item.description" />

    <div class="flex gap-2 flex-wrap -mr-1 mt-auto justify-end">
      <LabelChip v-for="label in top3" :key="label.id" :label="label" size="sm" />
    </div>

@ -37,6 +36,16 @@
<script setup lang="ts">
import { ItemOut, ItemSummary } from "~~/lib/api/types/data-contracts";

const api = useUserApi();

const imageUrl = computed(() => {
  if (!props.item.imageId) {
    return "/no-image.jpg";
  }

  return api.authURL(`/items/${props.item.id}/attachments/${props.item.imageId}`);
});

const top3 = computed(() => {
  return props.item.labels.slice(0, 3) || [];
});
@ -19,7 +19,7 @@
        <label tabindex="0" class="btn rounded-l-none rounded-r-xl">
          <Icon class="h-5 w-5" name="mdi-chevron-down" />
        </label>
        <ul tabindex="0" class="dropdown-content menu p-2 shadow bg-base-100 rounded-box w-64">
        <ul tabindex="0" class="dropdown-content menu p-2 shadow bg-base-100 rounded-box w-64 right-0">
          <li>
            <button type="button" @click="create(false)">Create and Add Another</button>
          </li>

@ -122,6 +122,7 @@
    };

    const { error, data } = await api.items.create(out);
    loading.value = false;
    if (error) {
      toast.error("Couldn't create item");
      return;
@ -17,7 +17,7 @@
        <label tabindex="0" class="btn rounded-l-none rounded-r-xl">
          <Icon class="h-5 w-5" name="mdi-chevron-down" />
        </label>
        <ul tabindex="0" class="dropdown-content menu p-2 shadow bg-base-100 rounded-box w-64">
        <ul tabindex="0" class="dropdown-content menu p-2 shadow bg-base-100 rounded-box w-64 right-0">
          <li>
            <button type="button" @click="create(false)">Create and Add Another</button>
          </li>

@ -77,6 +77,7 @@
    const { error, data } = await api.labels.create(form);
    if (error) {
      toast.error("Couldn't create label");
      loading.value = false;
      return;
    }
@ -18,7 +18,7 @@
        <label tabindex="0" class="btn rounded-l-none rounded-r-xl">
          <Icon class="h-5 w-5" name="mdi-chevron-down" />
        </label>
        <ul tabindex="0" class="dropdown-content menu p-2 shadow bg-base-100 rounded-box w-64">
        <ul tabindex="0" class="dropdown-content menu p-2 shadow bg-base-100 rounded-box w-64 right-0">
          <li>
            <button type="button" @click="create(false)">Create and Add Another</button>
          </li>

@ -85,6 +85,7 @@
    });

    if (error) {
      loading.value = false;
      toast.error("Couldn't create location");
    }
@ -22,7 +22,7 @@
    get() {
      return state.value[nodeHash.value] ?? false;
    },
    set(value) {
    set(value: boolean) {
      state.value[nodeHash.value] = value;
    },
  });
@ -33,7 +33,12 @@
        v-if="detail.copyable"
        class="opacity-0 group-hover:opacity-100 ml-4 my-0 duration-75 transition-opacity"
      >
        <CopyText :text="detail.text.toString()" :icon-size="16" class="btn btn-xs btn-ghost btn-circle" />
        <CopyText
          v-if="detail.text.toString()"
          :text="detail.text.toString()"
          :icon-size="16"
          class="btn btn-xs btn-ghost btn-circle"
        />
      </span>
    </span>
  </template>
@ -30,12 +30,15 @@ export function usePublicApi(): PublicApi {
export function useUserApi(): UserClient {
  const authCtx = useAuthContext();

  const requests = new Requests("", () => authCtx.token || "", {});
  const requests = new Requests("", "", {});
  requests.addResponseInterceptor(logger);
  requests.addResponseInterceptor(r => {
    if (r.status === 401) {
      console.error("unauthorized request, invalidating session");
      authCtx.invalidateSession();
      if (window.location.pathname !== "/") {
        window.location.href = "/";
      }
    }
  });
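As a side note (not part of the changeset): the hunk above registers a response interceptor so that any 401 response tears down the client-side session and redirects to the login page. A minimal, self-contained TypeScript sketch of that pattern follows; MiniRequests and its get method are hypothetical stand-ins for the project's Requests client, shown only to illustrate the interceptor flow.

type ResponseInterceptor = (r: Response) => void;

class MiniRequests {
  private interceptors: ResponseInterceptor[] = [];

  addResponseInterceptor(fn: ResponseInterceptor) {
    this.interceptors.push(fn);
  }

  // Run every registered interceptor against each response before returning it.
  async get(url: string): Promise<Response> {
    const resp = await fetch(url, { credentials: "include" });
    for (const fn of this.interceptors) {
      fn(resp);
    }
    return resp;
  }
}

const requests = new MiniRequests();
requests.addResponseInterceptor(r => {
  if (r.status === 401) {
    // The server no longer recognizes the session: drop client state and go to the login page.
    console.error("unauthorized request, invalidating session");
    if (window.location.pathname !== "/") {
      window.location.href = "/";
    }
  }
});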
@ -4,8 +4,7 @@ import { UserOut } from "~~/lib/api/types/data-contracts";
import { UserClient } from "~~/lib/api/user";

export interface IAuthContext {
  get token(): string | null;
  get expiresAt(): string | null;
  get token(): boolean | null;
  get attachmentToken(): string | null;

  /**

@ -13,11 +12,6 @@ export interface IAuthContext {
   */
  user?: UserOut;

  /**
   * Returns true if the session is expired.
   */
  isExpired(): boolean;

  /**
   * Returns true if the session is authorized.
   */
@ -43,59 +37,41 @@ class AuthContext implements IAuthContext {
  // eslint-disable-next-line no-use-before-define
  private static _instance?: AuthContext;

  private static readonly cookieTokenKey = "hb.auth.token";
  private static readonly cookieExpiresAtKey = "hb.auth.expires_at";
  private static readonly cookieTokenKey = "hb.auth.session";
  private static readonly cookieAttachmentTokenKey = "hb.auth.attachment_token";

  user?: UserOut;
  private _token: CookieRef<string | null>;
  private _expiresAt: CookieRef<string | null>;
  private _attachmentToken: CookieRef<string | null>;

  get token() {
    return this._token.value;
  }

  get expiresAt() {
    return this._expiresAt.value;
    // @ts-ignore sometimes it's a boolean I guess?
    return this._token.value === "true" || this._token.value === true;
  }

  get attachmentToken() {
    return this._attachmentToken.value;
  }

  private constructor(token: string, expiresAt: string, attachmentToken: string) {
  private constructor(token: string, attachmentToken: string) {
    this._token = useCookie(token);
    this._expiresAt = useCookie(expiresAt);
    this._attachmentToken = useCookie(attachmentToken);
  }

  static get instance() {
    if (!this._instance) {
      this._instance = new AuthContext(
        AuthContext.cookieTokenKey,
        AuthContext.cookieExpiresAtKey,
        AuthContext.cookieAttachmentTokenKey
      );
      this._instance = new AuthContext(AuthContext.cookieTokenKey, AuthContext.cookieAttachmentTokenKey);
    }

    return this._instance;
  }

  isExpired() {
    const expiresAt = this.expiresAt;
    if (expiresAt === null) {
      return true;
    }

    const expiresAtDate = new Date(expiresAt);
    const now = new Date();

    return now.getTime() > expiresAtDate.getTime();
    return !this.token;
  }

  isAuthorized() {
    return !!this._token.value && !this.isExpired();
    return this.token;
  }

  invalidateSession() {

@ -103,11 +79,8 @@ class AuthContext implements IAuthContext {

    // Delete the cookies
    this._token.value = null;
    this._expiresAt.value = null;
    this._attachmentToken.value = null;

    console.log("Session invalidated");
    window.location.href = "/";
  }

  async login(api: PublicApi, email: string, password: string, stayLoggedIn: boolean) {

@ -115,17 +88,10 @@ class AuthContext implements IAuthContext {

    if (!r.error) {
      const expiresAt = new Date(r.data.expiresAt);
      this._token = useCookie(AuthContext.cookieTokenKey, {
        expires: expiresAt,
      });
      this._expiresAt = useCookie(AuthContext.cookieExpiresAtKey, {
        expires: expiresAt,
      });
      this._token = useCookie(AuthContext.cookieTokenKey);
      this._attachmentToken = useCookie(AuthContext.cookieAttachmentTokenKey, {
        expires: expiresAt,
      });
      this._token.value = r.data.token;
      this._expiresAt.value = r.data.expiresAt as string;
      this._attachmentToken.value = r.data.attachmentToken;
    }
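For reference only (not part of the diff): the AuthContext hunks above swap a timestamp-based expiry check for a presence check on a session-flag cookie. A minimal sketch of the two checks, using hypothetical helper names, might look like this.

// Before: the cookie stored an ISO timestamp and the client compared it to "now".
function isExpiredByTimestamp(expiresAt: string | null): boolean {
  if (expiresAt === null) {
    return true;
  }
  return Date.now() > new Date(expiresAt).getTime();
}

// After: the cookie only records that a server-side session exists, so the
// client treats "no flag" as "expired" and leaves real validation to the server.
function isExpiredByFlag(sessionFlag: string | null): boolean {
  return sessionFlag !== "true";
}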
@ -13,7 +13,12 @@ let socket: WebSocket | null = null;
const listeners = new Map<ServerEvent, (() => void)[]>();

function connect(onmessage: (m: EventMessage) => void) {
  const ws = new WebSocket(`ws://${window.location.host}/api/v1/ws/events`);
  let protocol = "ws";
  if (window.location.protocol === "https:") {
    protocol = "wss";
  }

  const ws = new WebSocket(`${protocol}://${window.location.host}/api/v1/ws/events`);

  ws.onopen = () => {
    console.debug("connected to server");
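Not part of the diff, but as a standalone illustration of the change above: the events socket now matches its scheme to the page scheme, using wss:// when the page is served over HTTPS so the connection is not blocked as mixed content. The eventsSocketUrl helper name is invented for this sketch.

function eventsSocketUrl(): string {
  // Match the socket scheme to the page scheme to avoid mixed-content blocking.
  const protocol = window.location.protocol === "https:" ? "wss" : "ws";
  return `${protocol}://${window.location.host}/api/v1/ws/events`;
}

const ws = new WebSocket(eventsSocketUrl());
ws.onopen = () => console.debug("connected to server");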
Some files were not shown because too many files have changed in this diff.