Compare commits

350 commits

Author SHA1 Message Date
6e1e5a7280
backend: go get -u and mod tidy
Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
2025-04-12 21:34:25 -04:00
b5fbe698be
backend: go mod tidy
Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
2025-04-12 19:29:34 -04:00
14466cde2e
frontend: pnpm update
Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
2025-04-12 19:26:15 -04:00
90e495a748
pnpm: fix-lockfile
Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
2025-04-12 12:48:27 -04:00
e32ca9442d
Dockerfile: carry over local changes
Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
2025-04-12 09:57:36 -04:00
9895088668
Dockerfile: dos2unix
Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
2025-04-12 09:55:05 -04:00
CollapsingStar
6fd8457e5a
chore: remove deprecated version tag for Docker v2 (#902) 2024-05-23 07:53:43 -08:00
Graeme B
c2511d56d7
Update README.md (#891)
Add a Contributing section and the Buy Me A Coffee link since I had to hunt for it.
2024-05-23 07:52:56 -08:00
renovate[bot]
fbb17d66aa
fix(deps): update dependency @vueuse/nuxt to v10.9.0 (#827)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-28 08:11:54 -08:00
renovate[bot]
98429ae8c2
fix(deps): update dependency @vueuse/router to v10.9.0 (#828)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-28 08:11:46 -08:00
renovate[bot]
69adbacd25
fix(deps): update dependency h3 to v1.11.1 (#831)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-28 08:11:34 -08:00
renovate[bot]
43ff04317a
fix(deps): update dependency pinia to v2.1.7 (#833)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-28 08:11:22 -08:00
renovate[bot]
361c2121d8
fix(deps): update dependency daisyui to v2.52.0 (#830)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-28 08:09:16 -08:00
Spreadcat
0041c277ad
Add currency "norwegian krones" (#836)
* add: norwegian kroner

Adds the Norwegian kroner currency back into the file; it appears to have been dropped from the table in an earlier commit.

* fix: using the correct terms for the currency.
2024-03-12 14:48:51 -08:00
Hayden
f621d3ad5d
fix blocking error during startup when in demo mode (#838) 2024-03-05 06:44:43 -09:00
Hayden
af9aa239af
add email banner 2024-03-02 12:03:13 -06:00
renovate[bot]
f8a2c70f2c
chore(deps): update typescript-eslint monorepo to v6.21.0 (#824)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-02 08:14:23 -09:00
renovate[bot]
89b68da86e
fix(deps): update dependency @vuepic/vue-datepicker to v8.2.0 (#826)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-02 08:14:03 -09:00
renovate[bot]
03e9596b7a
fix(deps): update dependency @nuxtjs/tailwindcss to v6.11.4 (#825)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-02 08:06:01 -09:00
renovate[bot]
90535c8691
chore(deps): update dependency typescript to v5.3.3 (#822)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 18:32:52 -09:00
renovate[bot]
b7fc0e903c
chore(deps): update dependency vitest to v1.3.1 (#823)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 18:32:40 -09:00
renovate[bot]
eb420d50b1
chore(deps): update dependency eslint-config-prettier to v9.1.0 (#820)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 16:44:52 -09:00
renovate[bot]
8f3a081670
chore(deps): update dependency eslint-plugin-vue to v9.22.0 (#821)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 16:44:46 -09:00
Hayden
2867a05c92
chore: bump http kit (#817)
* use new httpkit runner

* refactor out last httpkit changes

* fix timeout defaults

* fix wrong time input - closes #819
2024-03-01 15:07:03 -09:00
renovate[bot]
77b4d594af
fix(deps): update module ariga.io/atlas to v0.19.1 (#814)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 15:06:12 -09:00
renovate[bot]
8f4fed1df9
fix(deps): update dependency postcss to v8.4.35 (#812)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 15:05:54 -09:00
renovate[bot]
b65da05d11
chore(deps): update dependency @nuxtjs/eslint-config-typescript to v12.1.0 (#816)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 15:05:34 -09:00
renovate[bot]
c2c8441625
chore(deps): update dependency @faker-js/faker to v8.4.1 (#815)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 15:05:24 -09:00
renovate[bot]
a1eb6e314c
fix(deps): update dependency dompurify to v3.0.9 (#811)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 10:52:03 -09:00
renovate[bot]
b0c9dcdeee
fix(deps): update dependency autoprefixer to v10.4.18 (#809)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 10:51:57 -09:00
renovate[bot]
0a609adb0b
fix(deps): update dependency @tailwindcss/typography to v0.5.10 (#806)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 10:51:47 -09:00
renovate[bot]
19ee6c2687
fix(deps): update dependency @headlessui/vue to v1.7.19 (#803)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 10:51:40 -09:00
Hayden
b77c9be36f
chore: bump deps (#810)
* bump prettier/eslint-plugin

* bump nuxt pwa

* use typed imports

* set vue version to fix layout errors

* disable import
2024-03-01 09:08:14 -09:00
Hayden
be2910e0df
fix missing icon 2024-03-01 11:34:07 -06:00
Hayden
749ee367d4
upgrade dependency 2024-03-01 10:54:31 -06:00
renovate[bot]
c36ce7ac2d
fix(deps): update module github.com/swaggo/http-swagger to v2 (#514)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-01 07:51:26 -09:00
renovate[bot]
d0128c7bd7
fix(deps): update dependency @tailwindcss/forms to v0.5.7 (#804)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-29 16:20:51 -09:00
Hayden
f91b33db38
fix: remove external dependency for icones (#805)
* change all icons to use iconify

* fix minor UI elements

* fix layout of table
2024-02-29 16:20:18 -09:00
renovate[bot]
cf166ac641
chore(deps): update dependency prettier to v2.8.8 (#802)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-29 12:17:27 -09:00
renovate[bot]
ae86e09970
chore(deps): update dependency @types/markdown-it to v13.0.7 (#801)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-29 12:17:19 -09:00
renovate[bot]
a7e0d2d127
fix(deps): update dependency postcss to v8.4.31 [security] (#791)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-29 10:48:33 -09:00
renovate[bot]
9866fdddb8
fix(deps): update module modernc.org/sqlite to v1.29.2 (#790)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-29 10:48:07 -09:00
renovate[bot]
ff9759ab20
chore(deps): update dependency mkdocs-material to v9.5.12 (#778)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-29 10:47:58 -09:00
Hayden
b655cfab28
feat: improve search matching (#800)
* offload search to lunr.js

* update location search when locations are mutated
2024-02-29 10:45:05 -09:00
Hayden
4c9ddac395
fix: date picker improvements (#793)
* use vue component for date picker

* zero out database fields even when set to 0001-xx-xx

* fix wrong datetime display + improved datepicker

* fix ts error

* zero out times

* add date-fns to dependencies
2024-02-29 09:58:26 -09:00
Hayden
c708b1759e
fix: profile bugs and add full path (#792)
* fix template rendering

* tidy

* fix missing currencies and profile errors

* endpoint for fullpath of an item

* endpoint test

* fix assertions
2024-02-25 13:04:24 -09:00
renovate[bot]
3ed50f5a1b
fix(deps): update module modernc.org/sqlite to v1.29.1 (#754)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-18 09:30:08 -09:00
renovate[bot]
e6d77b3f72
fix(deps): update module github.com/go-chi/chi/v5 to v5.0.12 (#775)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-18 09:29:53 -09:00
renovate[bot]
747815982e
fix(deps): update module github.com/go-playground/validator/v10 to v10.18.0 (#753)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-14 10:43:58 -09:00
renovate[bot]
7e23a75908
fix(deps): update module github.com/hay-kot/httpkit to v0.0.6 (#751)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-14 10:43:41 -09:00
renovate[bot]
4c8a97fe0d
chore(deps): update dependency mkdocs-material to v9.5.9 (#750)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-14 10:43:25 -09:00
renovate[bot]
87b07ab47d
chore(deps): update golangci/golangci-lint-action action to v4 (#747)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-14 06:13:34 -09:00
renovate[bot]
cf8000d8ee
chore(deps): update pnpm/action-setup action to v3 (#744)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-14 06:11:03 -09:00
renovate[bot]
31b10194f1
fix(deps): update module golang.org/x/crypto to v0.19.0 (#743)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-14 06:10:55 -09:00
renovate[bot]
3d748021dc
chore(deps): update dependency mkdocs-material to v9.5.8 (#742)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-07 09:45:51 -09:00
renovate[bot]
0f8e1fee35
fix(deps): update module github.com/rs/zerolog to v1.32.0 (#740)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-07 09:45:44 -09:00
renovate[bot]
8d18b06012
fix(deps): update module github.com/mattn/go-sqlite3 to v1.14.22 (#735)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-04 10:43:08 -09:00
renovate[bot]
6a32762c98
fix(deps): update module github.com/swaggo/swag to v1.16.3 (#731)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-04 10:42:59 -09:00
renovate[bot]
2311eda44b
fix(deps): update module ariga.io/atlas to v0.19.0 (#734)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-04 10:42:50 -09:00
renovate[bot]
5b52aa8abf
chore(deps): update dependency mkdocs-material to v9.5.7 (#736)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-04 10:42:39 -09:00
LINKIWI
aace77ec40
backend: Periodically send client aliveness pings in event stream websocket connection (#729)
* backend: Periodically send client aliveness pings following event websocket connection

* backend: Single persistent global ping goroutine instead of per-session ticker
2024-01-31 10:20:56 -09:00
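
The ping change above swaps a per-session ticker for one shared goroutine. A minimal sketch of that shape, independent of any particular WebSocket library — the `session` interface and the interval are assumptions, not Homebox's actual types:

```go
package pinger

import (
	"sync"
	"time"
)

// session is whatever can receive a ping; in practice it would wrap a
// WebSocket connection (hypothetical interface, for illustration only).
type session interface {
	Ping() error
}

// Pinger keeps every open session alive from one shared goroutine,
// instead of starting a per-session ticker.
type Pinger struct {
	mu       sync.Mutex
	sessions map[session]struct{}
}

func New() *Pinger {
	return &Pinger{sessions: make(map[session]struct{})}
}

func (p *Pinger) Add(s session)    { p.mu.Lock(); p.sessions[s] = struct{}{}; p.mu.Unlock() }
func (p *Pinger) Remove(s session) { p.mu.Lock(); delete(p.sessions, s); p.mu.Unlock() }

// Run broadcasts a ping to every registered session on each tick.
// Call it once at startup, e.g. go pinger.Run(30 * time.Second).
func (p *Pinger) Run(interval time.Duration) {
	t := time.NewTicker(interval)
	defer t.Stop()
	for range t.C {
		p.mu.Lock()
		for s := range p.sessions {
			if err := s.Ping(); err != nil {
				delete(p.sessions, s) // drop sessions that no longer respond
			}
		}
		p.mu.Unlock()
	}
}
```
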
renovate[bot]
c55d421326
chore(deps): update dependency mkdocs-material to v9.5.6 (#728)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-31 10:20:31 -09:00
renovate[bot]
5ca8e0c35d
fix(deps): update module github.com/mattn/go-sqlite3 to v1.14.21 (#730)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-31 10:16:31 -09:00
Hayden
aba853d598
redo atlas upgrade with huge binary 2024-01-28 10:50:43 -06:00
Hayden
2c8fd163ee
chore: ignore common bin output 2024-01-27 22:03:46 -06:00
Hayden
d9497bd69e remove large bin + go tidy
Former-commit-id: 830b4d8e3efb66874f0310d6c49d37d57481f986
2024-01-27 21:48:32 -06:00
LINKIWI
893f3f6df6 chore: Enable gofmt in linter, fix all outstanding errors (#727)
Former-commit-id: 1d9477f510bb2d6ada4b720933f113f24c25913e
Former-commit-id: b22ff272ca37181c93c6e83db8f1e1e67a767ebd
2024-01-27 18:29:10 -09:00
LINKIWI
ca55e5ba94 fix: Case-insensitive attachment extension matching for item photo inference (#725)
Former-commit-id: 3a6865079e68e9c1ee9beda651cbf123c58fcfc6
2024-01-27 18:28:58 -09:00
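
Case-insensitive extension matching usually just means lowercasing the extension before comparing. A small illustrative sketch; the list of image extensions is assumed, not taken from Homebox:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// isPhoto reports whether an attachment filename looks like an image,
// comparing the extension case-insensitively so "IMG_0001.JPG" matches.
func isPhoto(name string) bool {
	switch strings.ToLower(filepath.Ext(name)) {
	case ".jpg", ".jpeg", ".png", ".gif", ".webp":
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isPhoto("IMG_0001.JPG"), isPhoto("receipt.pdf")) // true false
}
```
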
LINKIWI
7753213657 fix: Use zerolog level parser, set global log level correctly from configuration (#724)
Former-commit-id: 08b46c2fe32bda46158eac46f82a2341bb1a93c2
2024-01-27 18:28:43 -09:00
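
zerolog ships its own level parser (`zerolog.ParseLevel`) and a global setter (`zerolog.SetGlobalLevel`). A hedged sketch of wiring a configured level through them; the `LOG_LEVEL` variable and the info fallback are assumptions, not the project's exact configuration path:

```go
package main

import (
	"os"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
)

func main() {
	// Parse the configured level with zerolog's parser instead of ad-hoc
	// string matching; "trace", "debug", "info", "warn", "error" all work.
	level, err := zerolog.ParseLevel(os.Getenv("LOG_LEVEL")) // env var name is illustrative
	if err != nil || level == zerolog.NoLevel {
		level = zerolog.InfoLevel // fall back to a sane default
	}

	// Apply it globally so every derived logger respects it.
	zerolog.SetGlobalLevel(level)

	log.Debug().Msg("only visible when LOG_LEVEL=debug or lower")
	log.Info().Msg("log level configured")
}
```
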
renovate[bot]
b3f7b59243 fix(deps): update module github.com/mattn/go-sqlite3 to v1.14.20 (#723)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 858cdec2d59198bb84cb4b713343e9bd503ebb6b
2024-01-26 06:10:22 -09:00
renovate[bot]
18a9b21a87 fix(deps): update module github.com/google/uuid to v1.6.0 (#719)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 59a5a1782bd7c9c63684a300a808932fd0072a3f
2024-01-24 08:46:07 -09:00
renovate[bot]
853b473668 chore(deps): update dependency mkdocs-material to v9.5.5 (#720)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: b8b706ee7447990546fd7d5cb9d257be9a3f23de
2024-01-24 08:42:19 -09:00
renovate[bot]
40905bc100 fix(deps): update module modernc.org/sqlite to v1.28.0 (#667)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: de4081d0d29fe010d56baed60f0d3e08b13475cc
2024-01-20 07:32:34 -09:00
renovate[bot]
8be9df2b8d fix(deps): update module github.com/go-playground/validator/v10 to v10.17.0 (#717)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 94399a93dc1d848fae46c596aa7ed0e719be09b0
2024-01-20 07:32:21 -09:00
renovate[bot]
bd9eb69313 fix(deps): update module github.com/mattn/go-sqlite3 to v1.14.19 (#668)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 6ee41038b16e87c68bc1f3ca749265c927a80b5a
2024-01-19 05:55:39 -09:00
renovate[bot]
fa31bb2448 chore(deps): update dependency mkdocs-material to v9.5.4 (#716)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: b3b81a3ccc025e67d60e1b6fa8aacad4bc84a51e
2024-01-19 05:52:33 -09:00
renovate[bot]
fcd33e59d9 fix(deps): update module golang.org/x/crypto to v0.18.0 (#706)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: d374fae88f6c64760922bc8b62fe5be40389eb36
2024-01-18 10:46:51 -09:00
zebrapurring
84aee20208 feat: enable search by serial number or model number (#677)
Co-authored-by: zebrapurring <>
Former-commit-id: f2f780936066eb5bb3dbbdbe0d0f9ef319662e73
2024-01-18 10:46:40 -09:00
Hayden
2b79788fbe feat: user defined currencies (#700)
* basic currency service for loading at runtime

* api endpoint for currencies

* sort slice before return

* remove currency validation

* validate using currency service

* implement selecting dynamic currency options

* bump go version

* fix type definition

* specify explicit type

* change go versions

* proper types for assetId

* log/return currency error

* make case insensitive

* use ToUpper instead

* feat: adding new currencies (#715)

* fix: task swag (#710)

Co-authored-by: Quoing <pavel.cadersky@mavenir.com>

* [feat] Adding new currencies

---------

Co-authored-by: quoing <quoing@users.noreply.github.com>
Co-authored-by: Quoing <pavel.cadersky@mavenir.com>
Co-authored-by: Bradley <41597815+userbradley@users.noreply.github.com>

* remove ts file and consolidate new values into json

* move flag to options namespace

* add env config for currencies

* basic documentation

* remove in sync test

---------

Co-authored-by: quoing <quoing@users.noreply.github.com>
Co-authored-by: Quoing <pavel.cadersky@mavenir.com>
Co-authored-by: Bradley <41597815+userbradley@users.noreply.github.com>
Former-commit-id: c4b923847a1b695dcddd1b346adcccfd3f3ce706
2024-01-18 10:45:42 -09:00
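
The bullets above describe a currency registry built at startup from the bundled JSON plus user-supplied extras, with case-insensitive validation and a sorted slice for the API endpoint. A rough sketch of that shape, with hypothetical names (`Service`, `IsSupported`) rather than the project's real API:

```go
package currencies

import (
	"sort"
	"strings"
	"sync"
)

// Currency is a minimal shape for illustration; the real model has more fields.
type Currency struct {
	Code   string
	Name   string
	Symbol string
}

// Service holds the currency set loaded at runtime (built-in JSON plus any
// user-configured extras) and answers lookups case-insensitively.
type Service struct {
	mu    sync.RWMutex
	byKey map[string]Currency
}

func NewService(defaults []Currency, extras ...Currency) *Service {
	s := &Service{byKey: make(map[string]Currency)}
	for _, c := range append(defaults, extras...) {
		// Uppercase the code so lookups are case-insensitive ("usd" == "USD").
		s.byKey[strings.ToUpper(c.Code)] = c
	}
	return s
}

// IsSupported validates a user-provided code against the loaded set.
func (s *Service) IsSupported(code string) bool {
	s.mu.RLock()
	defer s.mu.RUnlock()
	_, ok := s.byKey[strings.ToUpper(code)]
	return ok
}

// Slice returns the currencies sorted by code, ready to be returned by the API.
func (s *Service) Slice() []Currency {
	s.mu.RLock()
	defer s.mu.RUnlock()
	out := make([]Currency, 0, len(s.byKey))
	for _, c := range s.byKey {
		out = append(out, c)
	}
	sort.Slice(out, func(i, j int) bool { return out[i].Code < out[j].Code })
	return out
}
```
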
quoing
ce923a5b4c fix: task swag (#710)
Co-authored-by: Quoing <pavel.cadersky@mavenir.com>
Former-commit-id: d2bde3f24112aaa033e813546631b194c569694d
2024-01-16 17:39:25 -09:00
renovate[bot]
e83ff89c9d chore(deps): update dependency mkdocs-material to v9.5.3 (#697)
Former-commit-id: d920e1e215426f8f432f0ff0fa55310b7ec17ef1
2024-01-04 15:49:56 -06:00
renovate[bot]
c71c4f3b16 fix(deps): update module github.com/go-chi/chi/v5 to v5.0.11 (#698)
Former-commit-id: 6d8260b4fe276c4766c8d99330cbf8707bd4f88e
2024-01-04 15:48:08 -06:00
renovate[bot]
33661587fd fix(deps): update module github.com/google/uuid to v1.5.0 (#664)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 8ede42f79bfadeff674dfc576225eaf2d3a943f4
2024-01-04 12:12:20 -09:00
Marcel K
27b72603e1 feat: show quantity in card view if quantity == 0 (hay-kot/homebox#672) (#673)
* feat: show quantity in card view if quantity == 0 (hay-kot/homebox#672)

* Update frontend/components/Item/Card.vue

---------

Co-authored-by: bee-eater <bee-eater@users.noreply.github.com>
Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
Former-commit-id: d8e49622e52a45346485fc052932ee39b6518c63
2024-01-04 12:10:44 -09:00
renovate[bot]
a4d91adc1e fix(deps): update module golang.org/x/crypto to v0.17.0 [security] (#676)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: f3f2116a2b1b0b975693002bf4ce1224fb373a8e
2024-01-04 12:09:24 -09:00
Hayden
03df23d97c fix: inaccurate 401 & sql busy error (#679)
* fix inaccurate 401 error on SQL db error

* init golangci-lint config

* linter autofix

* testify auto fixes

* fix sqlite busy errors

* fix naming

* more linter errors

* fix rest of linter issues

Former-commit-id: e8449b3a7363a6cfd5bc6151609e6d2d94b4f7d8
2024-01-04 08:55:26 -09:00
zebrapurring
5e83b28ff5 fix: Render newlines in markdown fields (#682)
Co-authored-by: zebrapurring <>
Former-commit-id: b9183109f2c7ac309fd44073a1ee6f1f4b013ec8
2023-12-24 18:27:58 -09:00
Hayden
d759fad40c feat: add log statements + remove auto redirect (#671)
Former-commit-id: 3ee150e7d66d18c6369d171994932744a614a74f
2023-12-15 18:32:44 -09:00
Hayden
1cc9bf459a fix: ensure rows are closed (#670)
Former-commit-id: 85c8e4701606c7e4f1850414d467352f436a0e9e
2023-12-15 18:07:17 -09:00
Hayden
0c535aa8d8 feat: extract auth into provider (#663)
* extract auth into provider

* bump go version

* use pointer

* rebase

Former-commit-id: 8538877f52b9aae43b38afb37e5c69d99cb4827c
2023-12-12 05:49:46 -09:00
renovate[bot]
3eb4c21263 fix(deps): update dependency markdown-it to v14 (#655)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 522943687ed606da921b3adfa4eada9ac86bfaad
2023-12-12 05:44:48 -09:00
renovate[bot]
19c53dce2f chore(deps): update dependency vitest to v1 (#652)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: ff2577a739289e815e1c64495e123a051f402996
2023-12-12 05:44:38 -09:00
renovate[bot]
3967bc86d0 chore(deps): update actions/setup-go action to v5 (#653)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 0bc3969c0cb51c15f7eda869b0f4228cc08e7b47
2023-12-12 05:44:23 -09:00
renovate[bot]
2b6e42b11f chore(deps): update dependency mkdocs-material to v9.5.2 (#654)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 8a85b189974d876fe4e48f0492fe12f8966e79c8
2023-12-12 05:44:11 -09:00
renovate[bot]
d48de6b9f6 fix(deps): update github.com/gocarina/gocsv digest to b87c2d0 (#629)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: b5592167a260646662e19085e94dc46cd57cf908
2023-12-01 09:39:52 -09:00
renovate[bot]
477a5fce97 fix(deps): update module golang.org/x/crypto to v0.16.0 (#645)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 126c06a0fa182197084776e16ba4660d398f25dc
2023-12-01 09:39:44 -09:00
renovate[bot]
65efe7f575 fix(deps): update module github.com/hay-kot/httpkit to v0.0.5 (#643)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 014fdea4ff80cc74ac518b0331438bb8ec2dc2c8
2023-12-01 09:39:37 -09:00
Hayden
1b77b15db4 fix: ignore /api paths from service workers (#649)
Former-commit-id: f2363b85cb71e776d685527d10525b4e75e16883
2023-12-01 08:57:59 -09:00
Hayden
fc3bc3f010 fix: loading causing stale data to be presented in form (#650)
Former-commit-id: 4d9131ba7ea05e150b5c8a332ddf612d72f6055b
2023-12-01 08:57:43 -09:00
Hayden
81e76d9dd4 fix/feat: primary photo auto set and auto-set primary photo (#651)
* fix error when item doesn't have photo attachment

* automatically detect image types and set primary photo if first

* remove charts.js from libs

Former-commit-id: 321a83b6344461233979e4f45e60f3121b1ffcef
2023-12-01 08:57:29 -09:00
renovate[bot]
bd1a241be1 chore(deps): update docker/setup-qemu-action action to v3 (#625)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: d1d98bcb067c6e510dd3ad0650a33b3daa8593d0
2023-11-30 14:25:55 -09:00
renovate[bot]
bc4c0c0b89 chore(deps): update goreleaser/goreleaser-action action to v5 (#626)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: db00e7a268a39a9f8112e3b1b8c1f7b1f5384949
2023-11-30 14:25:47 -09:00
renovate[bot]
c7936fc478 chore(deps): update dependency mkdocs-material to v9.4.14 (#630)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 7ec22ed9bd24dbebcc1e02c59e35ccf06ec6c764
2023-11-30 14:25:30 -09:00
Kevin Holtkamp
09ee4fef3a docs: Fix a bunch of grammar and spelling, rephrased some sentences to be more readable (#635)
Former-commit-id: 10c030a56b5ba9020b12b6b35f0c7ef737b43d6b
2023-11-30 14:25:22 -09:00
Hayden
d6b0062ae9 feat: set version flag (#632)
Former-commit-id: 9edbda3daa7df20c2ba1df1fbddf0ecabfa3ee92
2023-11-24 10:02:02 -09:00
Hayden
df26a3eeb7 fix: allow zeroing out asset ids (#624)
Former-commit-id: 8cc0f302919758044dc9ff4dc785c452364dc715
2023-11-15 18:46:47 -09:00
Hayden
798d873f76 fix: images in child items (#623)
* support parentID search

* fetch images for item children

Former-commit-id: afbc6a49ac0a39684e42c95eba04517745b4ccf1
2023-11-15 18:41:24 -09:00
renovate[bot]
a26ea3bac5 chore(deps): update dependency @types/markdown-it to v13 (#577)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 2594d4cdb4dbbde4c84b43a507d3b8ec7b9eb68c
2023-11-15 18:18:19 -09:00
renovate[bot]
812b464bc7 chore(deps): update dependency @vite-pwa/nuxt to ^0.2.0 (#616)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 2eafa8e72f9d6c8c56d088ad54bdef0e29ddfbc7
2023-11-15 18:18:07 -09:00
renovate[bot]
e1f34c2507 fix(deps): update dependency @pinia/nuxt to ^0.5.0 (#620)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: e65d44fa9eb7cbcba478790594be24f6b36c2bbf
2023-11-15 18:17:57 -09:00
Hayden
79be938531 feat: expose timeout variables (#622)
* expose timeout variables

* formatting

Former-commit-id: eeae790fe49b4a6be95b412ab5b3cf8968ec9cc1
2023-11-15 18:17:43 -09:00
renovate[bot]
e3ddc68eb2 chore(deps): update actions/setup-node action to v4 (#621)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: a70ee33759712590d63842fe3c8662584b39776e
2023-11-15 18:15:31 -09:00
Hayden
eb9175ab27 fix: use theme aware background color (#619)
Former-commit-id: 80e20713008e34fbf0e66b05d49a1dee262ccd9c
2023-11-15 18:01:03 -09:00
renovate[bot]
54b7b2c35f chore(deps): update pnpm/action-setup action to v2.4.0 (#511)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: db27d34b4f0d118e417bd5b399b926e6eeada2b2
2023-11-15 17:58:30 -09:00
Owen Valentine
ab98870350 feat: Validate bark, ntfy, generic+ shortcut (#591)
Former-commit-id: da22074ed34da7696b1281bd10c61f1dad2786f0
2023-11-15 17:53:06 -09:00
Hayden
13c437c418 fix: ensure URLs aren't encoded (#618)
Former-commit-id: 51ba15f84c47026c09c8193767a1492af4ee44f7
2023-11-15 17:51:19 -09:00
Hayden
ba2e66a014 fix: filepath sep on windows (#615)
Former-commit-id: b408318acbc2b1154d710f559d7572c110a94445
2023-11-15 17:43:44 -09:00
Hayden
1adf24e109 chore: bump all go dependencies (#614)
* bump all

* code-generation

Former-commit-id: c0e8e340655860f5bd3e92397a10ef844320eb11
2023-11-15 17:30:49 -09:00
Hayden
742ece7923 chore: rewrite generator to resolve strange name generation (#612)
* rewrite generator to resolve strange name generation

* fix asset id types

* ignore errored types

* remove deprecated calls

* use more random words

* random user

Former-commit-id: 4738a9b131e75479b30bda3b9967329268f69685
2023-11-15 17:19:51 -09:00
renovate[bot]
f74736b369 chore(deps): update actions/checkout action to v4 (#575)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: b08e52104c70ee5cbf60bbffd86ecca4a5d5dcd0
2023-11-15 08:08:46 -09:00
renovate[bot]
3a3be89b90 chore(deps): update docker/setup-buildx-action action to v3 (#579)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 3e2ab29054b48ef48baf404c64eec2f26d2bb28f
2023-11-15 08:08:37 -09:00
renovate[bot]
4082092560 chore(deps): update dependency mkdocs-material to v9.4.8 (#592)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: e5f66d99bc0523db58c30e49a17a4323f2030d3a
2023-11-15 08:08:26 -09:00
Hayden
31839eb444 fix: restore location section (#587)
Former-commit-id: 0995478cc0
2023-10-15 06:37:47 -08:00
Hayden
6e203e7833 fix: infinite redirect issue (#583)
Former-commit-id: ae4b95301f
2023-10-10 05:43:44 -08:00
Hayden
0ef9d0deb8 Revert "chore(deps): update dependency nuxt to v3.7.4 (#554)" (#580)
This reverts commit 36e13ab03b [formerly 2cd3c15215].

Former-commit-id: d8482f3a13
2023-10-06 20:29:27 -08:00
Hayden
c71f077466 refactor: rewrite to cookie based auth (#578)
* rewrite to cookie based auth

* remove interceptor

Former-commit-id: 1365bdfd46
2023-10-06 19:44:43 -08:00
renovate[bot]
36e13ab03b chore(deps): update dependency nuxt to v3.7.4 (#554)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 2cd3c15215
2023-10-06 18:51:54 -08:00
renovate[bot]
c0f19cdf05 fix(deps): update module modernc.org/sqlite to v1.26.0 (#574)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 0dc4fa5d98
2023-10-06 18:51:44 -08:00
Hayden
b20c88e256 feat: primary images (#576)
* add support for primary images

* fix locked loading state issue

* add action to auto-set images

Former-commit-id: 318b8be192
2023-10-06 18:51:08 -08:00
Hayden
ce16b37b97 fix: field values request fails (#573)
Former-commit-id: 63a966c526
2023-10-06 14:10:44 -08:00
Hayden
6ed1893bed feat: make selectables clearable (#572)
Former-commit-id: db16d3fb23
2023-10-06 13:32:49 -08:00
renovate[bot]
bd9ea571aa fix(deps): update module golang.org/x/crypto to v0.14.0 (#570)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 1952b9f1cb
2023-10-06 13:24:12 -08:00
renovate[bot]
10d9c4fbcc fix(deps): update module github.com/rs/zerolog to v1.31.0 (#569)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 2b31d46ab3
2023-10-06 13:24:00 -08:00
Hayden
eff5db3664 fix: ensure loading is toggled (#571)
Former-commit-id: f3f96723b2
2023-10-06 13:22:16 -08:00
renovate[bot]
f8a6160039 fix(deps): update module github.com/yeqown/go-qrcode/writer/standard to v1.2.2 (#567)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: b28bb2c4a8
2023-10-06 13:06:00 -08:00
renovate[bot]
4375ff8bd6 fix(deps): update module github.com/containrrr/shoutrrr to v0.8.0 (#555)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: a33cf54a33
2023-10-06 13:05:53 -08:00
Hayden
9bee6e9863 pr: fixed incorrect sum of the total items price (#568)
* Fixed incorrect sum of the total items price

https://github.com/hay-kot/homebox/issues/458

* fix eslint errors

---------

Co-authored-by: Adamko <33964772+cRaZy92@users.noreply.github.com>
Former-commit-id: f13bf2958d
2023-10-06 12:50:55 -08:00
renovate[bot]
b42d20b6a6 chore(deps): update dependency mkdocs-material to v9.4.4 (#553)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: a9712c48af
2023-10-06 12:31:51 -08:00
renovate[bot]
ba1e3a905b fix(deps): update module github.com/go-playground/validator/v10 to v10.15.5 (#556)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: e68b7cf500
2023-10-06 12:30:31 -08:00
renovate[bot]
973d5ca97d fix(deps): update module github.com/swaggo/swag to v1.16.2 (#552)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 744a5bbb47
2023-09-14 06:19:57 -08:00
renovate[bot]
57baf22534 fix(deps): update module github.com/google/uuid to v1.3.1 (#551)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: fc5698410b
2023-09-14 06:19:37 -08:00
renovate[bot]
c615707cbc fix(deps): update module golang.org/x/crypto to v0.13.0 (#532)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 455163d637
2023-09-14 06:19:28 -08:00
Aaron von Awesome
992eea3278 fix: minor typo (#546)
Former-commit-id: fbc7e6e33a
2023-09-14 06:13:42 -08:00
renovate[bot]
03288b52ee chore(deps): update dependency eslint-config-prettier to v9 (#533)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 5739b2005a
2023-09-14 06:11:08 -08:00
renovate[bot]
a274bbcf64 fix(deps): update module modernc.org/sqlite to v1.25.0 (#531)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 5f41960c0a
2023-09-14 06:10:46 -08:00
renovate[bot]
64e9889d41 fix(deps): update module github.com/rs/zerolog to v1.30.0 (#517)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: c89aa738cf
2023-09-14 06:10:37 -08:00
Jonathan Gawrych
58e80ab3e0 fix: mobile "Create and Add Another" goes off screen (#540)
Former-commit-id: 94fd9c314d
2023-08-31 09:07:45 -08:00
Hayden
94e81d14fa fix websocket over secure connection (#542)
* fix https connection

* explicit dependency

Former-commit-id: 0876deb1e9
2023-08-24 06:28:56 -08:00
tctlrd
0b021e898f feat: add currencies XAG and XAU (#535)
* Added currencies XAG and XAU to currency.ts

I added XAG and XAU for myself and others who prefer to measure value with something of substance.

Review the ISO 4217 standard to view a full list of official currency codes including the ones I have added.

https://www.iso.org/iso-4217-currency-codes.html
https://en.wikipedia.org/wiki/ISO_4217

Example:
https://www.xe.com/currencyconverter/convert/?Amount=100&From=XAG&To=USD

API for exchange rates:
https://openexchangerates.org/

* Added field values xag and xau to group.go

* Update group.go
Former-commit-id: 5438898b49
2023-08-23 09:29:22 -08:00
Hayden
be8d6e8235 update lock file
Former-commit-id: 9fa17bec90
2023-08-09 21:49:32 -05:00
Cheng Gu
d3dcb599ca feat: set cookies' expires attribute and fix remember me (#530)
Former-commit-id: b5987f2e8d
2023-08-09 18:48:39 -08:00
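
Setting the cookie's `Expires` attribute only when "remember me" is checked is a small standard-library change on the Go side. An illustrative sketch; the cookie name and seven-day lifetime are assumptions, not Homebox's actual values:

```go
package main

import (
	"net/http"
	"time"
)

// setSessionCookie sets the auth cookie. With rememberMe, the Expires
// attribute is set so the browser persists it across restarts; otherwise it
// remains a session cookie that is dropped when the browser closes.
func setSessionCookie(w http.ResponseWriter, token string, rememberMe bool) {
	cookie := &http.Cookie{
		Name:     "hb.auth.session", // hypothetical cookie name
		Value:    token,
		Path:     "/",
		HttpOnly: true,
		SameSite: http.SameSiteLaxMode,
	}
	if rememberMe {
		cookie.Expires = time.Now().Add(7 * 24 * time.Hour)
	}
	http.SetCookie(w, cookie)
}

func main() {
	http.HandleFunc("/login", func(w http.ResponseWriter, r *http.Request) {
		setSessionCookie(w, "example-token", r.URL.Query().Get("remember") == "true")
		w.WriteHeader(http.StatusNoContent)
	})
	_ = http.ListenAndServe(":8080", nil)
}
```
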
Hayden
0cbe516ca3 feat: WebSocket based implementation of server sent events for cache busting (#527)
* rough implementation of WS based event system for server side notifications of mutation

* fix test construction

* fix deadlock on event bus

* disable linter error

* add item mutation events

* remove old event bus code

* refactor event system to use composables

* refresh items table when new item is added

* fix create form errors

* cleanup unnecessary calls

* fix importer errors + limit fn calls on import

Former-commit-id: 2cbcc8bb1d
2023-08-02 13:00:57 -08:00
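
The event system described above boils down to an in-process bus that fans item-mutation events out to connected WebSocket sessions so the frontend can refresh stale data. A minimal sketch of that fan-out; the event name and channel-based subscribers are assumptions, not the project's real API:

```go
package eventbus

import "sync"

// EventType identifies which kind of server-side mutation occurred.
type EventType string

const EventItemMutation EventType = "item.mutation" // name is illustrative

// Event is the payload broadcast to clients so they can bust caches and refetch.
type Event struct {
	Type    EventType
	GroupID string
}

// Bus fans events out to subscribers; in the real system each subscriber
// would forward events over its WebSocket connection.
type Bus struct {
	mu   sync.Mutex
	subs map[chan Event]struct{}
}

func New() *Bus { return &Bus{subs: make(map[chan Event]struct{})} }

// Subscribe registers a buffered channel; the caller reads events from it.
func (b *Bus) Subscribe() chan Event {
	ch := make(chan Event, 8)
	b.mu.Lock()
	b.subs[ch] = struct{}{}
	b.mu.Unlock()
	return ch
}

func (b *Bus) Unsubscribe(ch chan Event) {
	b.mu.Lock()
	delete(b.subs, ch)
	b.mu.Unlock()
	close(ch)
}

// Publish is called after an item is created/updated/deleted; full or slow
// subscribers are skipped rather than blocking the request path.
func (b *Bus) Publish(e Event) {
	b.mu.Lock()
	defer b.mu.Unlock()
	for ch := range b.subs {
		select {
		case ch <- e:
		default:
		}
	}
}
```
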
Hayden
26911e9530 specify h3 dependency
Former-commit-id: cceec06148
2023-08-02 09:05:07 -05:00
Hayden
5524dfa83d try node 18
Former-commit-id: 2e2eed143d
2023-08-02 09:01:47 -05:00
renovate[bot]
21d54eb2ac chore(deps): update dependency vitest to ^0.34.0 (#529)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 272cc5a370
2023-08-02 05:59:31 -08:00
Hayden
4152199f3b build nightly rootless
Former-commit-id: 275e106d72
2023-08-02 08:47:53 -05:00
Hayden
1d40251f29 include rootless dockerfile
Former-commit-id: 3f0e65a2ad
2023-08-02 08:45:22 -05:00
Hayden
97e137c411 feat: add support for create + add more for all create modals and support k… (#526)
* add support for create + add more for all create modals and support keyboard bindings

* listen for esc to close modals

Former-commit-id: 22bbaae08f
2023-07-31 09:53:26 -08:00
Hayden
0de6c2338d fix: prevent resetting dialog state on error (#524)
Former-commit-id: 8c7d91ea52
2023-07-31 08:22:08 -08:00
Hayden
a2479155b2 feat: support cmd+s / ctrl+s and rework button display on edit (#523)
Former-commit-id: 5a219f6a9c
2023-07-31 06:57:42 -08:00
Hayden
e2dace75f4 fix: label prop not being passed to password input (#522)
Former-commit-id: 895017b28e
2023-07-31 06:08:35 -08:00
Hayden
ef9a7cd811 fix: assert/asserts (#521)
Former-commit-id: 02ce52dbe3
2023-07-31 06:05:37 -08:00
Hayden
964270e054 feat: more currency support (#520)
* add multiple new currencies

* add multiple new currencies

* remove duplicate yen

Former-commit-id: c5ae6b17f9
2023-07-31 05:59:36 -08:00
renovate[bot]
6b9cdf3294 chore(deps): update dependency mkdocs-material to v9.1.21 (#512)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 371fc0a6af
2023-07-29 09:54:55 -08:00
Hayden
6fd228f1f4 ui: rework location/labels pages (#475)
* formatting

* slimdown locations page

* update location/labels

* fix dependency issues

* fix type generator

* cleanup unused variables

Former-commit-id: 016780920d
2023-07-27 13:21:28 -08:00
db8200
0eb0b283b2 fix 3 places where API URLs were not constructed by function route (#451)
* Fixed 3 places where API URLs were not constructed by function route(path, params).

* autofix

---------

Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
Former-commit-id: 06eb6c1f91
2023-07-22 20:11:29 -08:00
renovate[bot]
4b071bccda fix(deps): update module github.com/swaggo/http-swagger to v2 (#508)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: 27dad0e118
2023-07-22 20:10:52 -08:00
renovate[bot]
65a7947932 fix(deps): update module github.com/swaggo/http-swagger to v2 (#506)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Former-commit-id: dc9446516a
2023-07-22 20:09:43 -08:00
Hayden
a3e607a887 chore: bump all go deps (#507)
* bump all deps

* run code-gen

Former-commit-id: a042496c71
2023-07-22 19:57:51 -08:00
renovate[bot]
feab9f4c46
chore(deps): update dependency @vite-pwa/nuxt to ^0.1.0 (#474)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-22 19:46:08 -08:00
renovate[bot]
fe5622d62a
fix(deps): update module golang.org/x/crypto to v0.11.0 (#495)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-22 19:45:56 -08:00
renovate[bot]
e759f2817e
chore(deps): update dependency nuxt to v3.6.5 (#503)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-22 19:45:35 -08:00
renovate[bot]
60cc5c2710
chore(deps): update dependency mkdocs-material to v9.1.19 (#497)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-22 19:44:19 -08:00
renovate[bot]
25ccd678c9
chore(deps): update typescript-eslint monorepo to v6 (#500)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-20 11:13:42 -08:00
Pranshu Agrawal
a77b4cbe71
update "h3" verison to 1.7.1 (#502) 2023-07-20 11:13:18 -08:00
renovate[bot]
aae32b0d74
fix(deps): update module github.com/go-chi/chi/v5 to v5.0.10 (#493)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-18 12:08:23 -08:00
renovate[bot]
a94b43a19e
fix(deps): update module github.com/ardanlabs/conf/v3 to v3.1.6 (#492)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-18 12:08:05 -08:00
renovate[bot]
9a4c2df552
chore(deps): update dependency vitest to ^0.33.0 (#494)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-18 12:07:55 -08:00
renovate[bot]
d5b89a755e
chore(deps): update actions/setup-go action to v4 (#496)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-18 12:07:45 -08:00
renovate[bot]
a9acf62d93
chore(deps): update dependency mkdocs-material to v9.1.18 (#491)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:51:13 -08:00
renovate[bot]
c896e198dd
fix(deps): update module github.com/swaggo/http-swagger to v2 (#490)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:39:42 -08:00
Cappy-Bara
c538518b4b
fix: add parseDependency to swag (#486) 2023-07-16 17:37:56 -08:00
renovate[bot]
f66d14eeea
fix(deps): update module github.com/stretchr/testify to v1.8.4 (#468)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:34:51 -08:00
renovate[bot]
bc8feac83c
chore(deps): update dependency nuxt to v3.6.3 (#350)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:34:23 -08:00
renovate[bot]
40a98bcf30
fix(deps): update module modernc.org/sqlite to v1.24.0 (#437)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:34:00 -08:00
renovate[bot]
045e91d9ac
fix(deps): update module github.com/mattn/go-sqlite3 to v1.14.17 (#470)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:33:35 -08:00
renovate[bot]
a80ab0f3e9
fix(deps): update module github.com/go-playground/validator/v10 to v10.14.1 (#478)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-16 17:33:22 -08:00
renovate[bot]
e5d209d407
chore(deps): update dependency mkdocs-material to v9.1.15 (#467)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-06-02 13:57:09 -08:00
Hayden
ef1531e561
feat: easily increment quantity (#473)
* fix vue version issue

* new patch API endpoint

* doc-gen

* new API class method for patch operations

* add quantity patch UI support

* fix typegen errors

* fix ts errors
2023-06-02 13:56:40 -08:00
renovate[bot]
4dd036abb2
chore(deps): update dependency @vite-pwa/nuxt to ^0.0.9 (#453)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-28 10:59:57 -08:00
Hayden
81e909ccfb
chore: tidy (#466) 2023-05-28 10:59:44 -08:00
Hayden
0cb9d2a8e4
bump nuxt + fix CookieRef (#465) 2023-05-28 09:51:23 -08:00
renovate[bot]
cb16c0e829
fix(deps): update module github.com/go-playground/validator/v10 to v10.14.0 (#462)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-23 10:43:42 -08:00
renovate[bot]
9e067ee230
fix(deps): update module github.com/stretchr/testify to v1.8.3 (#460)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-23 10:43:34 -08:00
renovate[bot]
8b53d40a2a
chore(deps): update dependency mkdocs-material to v9.1.14 (#461)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-23 10:42:49 -08:00
renovate[bot]
4c0ad7a5d8
chore(deps): update dependency mkdocs-material to v9.1.13 (#457)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-16 18:24:19 -08:00
D M
66e25ba068
feat: Low-Privileged and Distroless Docker Image (#372)
* feat: use distroless image and non-root user

* fix: remove conflicts after merge

* chore: Comment the Dockerfile

* chore: Update documentation to reflect image changes

* Split docker build into latest and latest-rootless

One more job added to the publish GitHub Action, to build and push TAG-rootless images.

* fix: add missing workflow

* feat: update documentation about double tags

* feat: update readme with double tags

---------

Co-authored-by: daniele <daniele@coolbyte.eu>
2023-05-13 10:38:57 -08:00
renovate[bot]
56c98e6e3a
chore(deps): update dependency @faker-js/faker to v8 (#449)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-13 10:34:57 -08:00
renovate[bot]
01f305a98e
fix(deps): update github.com/gocarina/gocsv digest to 7f30c79 (#448)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-13 10:34:45 -08:00
renovate[bot]
e14cdaccdd
fix(deps): update module golang.org/x/crypto to v0.9.0 (#447)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-13 10:34:33 -08:00
renovate[bot]
4ece25b58d
chore(deps): update dependency mkdocs-material to v9.1.12 (#444)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-13 10:34:23 -08:00
renovate[bot]
c1957bb927
chore(deps): update dependency vitest to ^0.31.0 (#442)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-06 10:00:40 -08:00
renovate[bot]
636ca155e5
fix(deps): update module github.com/go-playground/validator/v10 to v10.13.0 (#438)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-06 10:00:33 -08:00
renovate[bot]
17a5b43609
chore(deps): update dependency mkdocs-material to v9.1.9 (#440)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-05-02 17:09:15 -08:00
renovate[bot]
b2b3ccf923
chore(deps): update dependency mkdocs-material to v9.1.8 (#435)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-24 23:08:32 -08:00
renovate[bot]
2272c7eb6b
fix(deps): update module modernc.org/sqlite to v1.22.0 (#427)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-24 18:30:53 -08:00
renovate[bot]
181c324dd4
chore(deps): update dependency mkdocs-material to v9.1.7 (#432)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-24 18:30:41 -08:00
renovate[bot]
89912b18d7
fix(deps): update dependency @vueuse/router to v10 (#418)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-21 22:54:09 -08:00
renovate[bot]
85f2af4bc3
fix(deps): update module github.com/swaggo/swag to v1.16.1 (#426)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-21 22:53:29 -08:00
Hayden
21ad5a32c1
fix: add generic shoutrrr prefix to validators (#422) 2023-04-16 11:21:33 -08:00
renovate[bot]
4b51a4ad9a
fix(deps): update module ariga.io/atlas to v0.10.1 (#395)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:37:57 -08:00
renovate[bot]
dd7e634b69
fix(deps): update dependency @vueuse/nuxt to v10 (#415)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:37:00 -08:00
renovate[bot]
351ec64bbc
fix(deps): update module github.com/rs/zerolog to v1.29.1 (#414)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:36:14 -08:00
renovate[bot]
3a758e012f
chore(deps): update dependency vitest to ^0.30.0 (#403)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:35:55 -08:00
renovate[bot]
c16084d99f
chore(deps): update dependency mkdocs-material to v9.1.6 (#401)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:35:42 -08:00
renovate[bot]
5591267124
fix(deps): update module golang.org/x/crypto to v0.8.0 (#400)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:35:27 -08:00
renovate[bot]
d46c16f01f
fix(deps): update github.com/gocarina/gocsv digest to 6445c2b (#398)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-14 20:35:18 -08:00
zodac
18c22e8a68
Fixing minor typos (#368)
* Adding fullstops for consistency

* Fixing typos

* Fixing eslints
2023-04-14 20:34:40 -08:00
Hayden
64b3ac3e94
change safeserve -> httpkit (#405) 2023-04-09 10:39:43 -08:00
verybadsoldier
c36b9dcf5d
update go version in devcontainer to 1.20 to match version in go.mod (#402) 2023-04-09 10:38:17 -08:00
Hayden
d3b6c93b63
use ref_name 2023-04-03 14:21:54 -08:00
Hayden
3b862e36c8
fix CI 2023-04-03 14:18:08 -08:00
Hayden
f3bb86d905
change publish workflow (#390) 2023-04-03 14:10:45 -08:00
Hayden
4dd925caf0
fix: other minor fixes (#388)
* remove overflow-hidden when not collapsed

* fix recently added on homescreen

* fix delete account formatting

* add manufacturer to search

* move nav button to left
2023-04-01 22:01:21 -08:00
Hayden
ced5aef6d1
fix: export child relationships (#385)
* tidy

* ensure export contains locations path

* update pnpm lock file

* fix swagger stuff

* code gen

* fix linter issue

* fix reverse order bug in test
2023-04-01 15:10:27 -08:00
Hayden
6a853c07a0
fix: various minor bugs (#384)
* fix insufficiently large max height for cards

* fix listener for resetItemDateTimes

* support YYYY/MM/DD format for imports

* fix columns in docs

* use comma delimiter
2023-04-01 14:07:44 -08:00
renovate[bot]
f0b9a0fce4
chore(deps): update dependency mkdocs-material to v9.1.5 (#382)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-04-01 12:22:34 -08:00
renovate[bot]
00f09fec2f
fix(deps): update module modernc.org/sqlite to v1.21.1 (#381)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-28 19:49:16 -08:00
renovate[bot]
6e1863b515
fix(deps): update module github.com/hay-kot/safeserve to v0.0.2 (#378)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-28 19:49:07 -08:00
renovate[bot]
dfe2084c74
fix(deps): update github.com/gocarina/gocsv digest to 9a18a84 (#375)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-28 19:48:59 -08:00
renovate[bot]
0825f055a7
fix(deps): update module github.com/swaggo/swag to v1.8.12 (#380)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-28 19:48:49 -08:00
Hayden
e1e04d49aa
update git ignore 2023-03-25 11:20:38 -08:00
Hayden
9020587c9e
set working directory 2023-03-25 11:17:07 -08:00
Hayden
bd0db1ea37
remove go tidy 2023-03-25 11:13:23 -08:00
Hayden
d2985ff72c
go tidy 2023-03-25 11:12:48 -08:00
Hayden
8c57ff841e
fix: redirect issues for authorized users (#374) 2023-03-25 11:07:22 -08:00
renovate[bot]
0264bfb8c1
chore(deps): update dependency mkdocs-material to v9.1.4 (#371)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-24 08:03:56 -08:00
Hayden
5dd6844536
feat: change shit to things (#369) 2023-03-23 19:10:19 -08:00
Hayden
0f8db862b4
feat: pwa support (#366)
* add PWA support

* fix broken URLs for query

* remove unused variable

* restore authURL
2023-03-23 10:27:12 -08:00
Hayden
be6b5c9c56
run frontend build before publish via goreleaser 2023-03-23 09:41:15 -08:00
Hayden
faed343eda
fix: cookie-auth-issues (#365)
* fix session clearing on error

* use singleton context to manage user state

* implement remember-me functionality

* fix errors

* fix more errors
2023-03-22 21:52:25 -08:00
Hayden
ed1230e17d
feat: goreleaser + remove cgo dependency (#363)
* wip: goreleaser

* update image path

* spelling

* set working dir

* change main.go

* remove unused field

* drop cgo requirement

* remove unused workflow step

* generate code

* drop cgo from docker file

* update publish workflow

* annotate as unfinished
2023-03-22 20:49:49 -08:00
zodac
2d768e2b9c
feat: Adding NZD currency (#360)
* Adding NZD as currency option

* Updating frontend

* Sorting alphabetically

* Fixing typo
2023-03-22 20:26:51 -08:00
Hayden
40e76bac0c
feat: filter details for zero values (#364)
* filter details for zero values

* ensure exhaustive checks

* update event listener to only bind when collapsable
2023-03-22 19:05:13 -08:00
Hayden
840d220d4f
feat: use notifiers on schedule (#362)
* fix potential memory leak with time.After

* add new background service to manage scheduled notifications

* update docs

* remove old js reference

* closes #278

* tidy
2023-03-21 11:32:48 -08:00
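
The `time.After` leak mentioned in the first bullet is a classic Go pattern: calling `time.After` inside a long-running select loop allocates a fresh timer on every iteration, and (on the Go versions current at the time) those timers are not reclaimed until they fire. A single reusable ticker avoids that. A sketch of a scheduler loop under that assumption; the function names are illustrative, not the project's:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// runScheduler is the shape of the fix: instead of `case <-time.After(interval)`
// on every loop iteration, one ticker is created up front and reused.
func runScheduler(ctx context.Context, interval time.Duration, notify func()) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			notify() // e.g. send scheduled maintenance notifications
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
	defer cancel()
	runScheduler(ctx, time.Second, func() {
		fmt.Println("tick: checking for due notifications")
	})
}
```
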
renovate[bot]
975e636fb6
fix(deps): update module github.com/swaggo/swag to v1.8.11 (#361)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-21 11:27:40 -08:00
renovate[bot]
d1076baf84
fix(deps): update module github.com/swaggo/http-swagger to v2 (#358)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-21 11:26:46 -08:00
renovate[bot]
40fcef4e9b
chore(deps): update dependency typescript to v5 (#355)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-21 11:23:30 -08:00
Hayden
97fb94d231
fix: refactoring errors (#359)
* #352 - require one date to be set to save

* fix many type regressions
2023-03-20 21:48:22 -08:00
renovate[bot]
4a8ba6231d
fix(deps): update module entgo.io/ent to v0.11.10 (#328)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 20:32:48 -08:00
Hayden
db80f8a159
chore: refactor api endpoints (#339)
* move typegen code

* update taskfile to fix code-gen caches and use 'dir' attribute

* enable dumping stack traces for errors

* log request start and stop

* set zerolog stack handler

* fix routes function

* refactor context adapters to use requests directly

* change some method signatures to support GID

* start requiring validation tags

* first pass on updating handlers to use adapters

* add errs package

* code gen

* tidy

* rework API to use external server package
2023-03-20 20:32:10 -08:00
renovate[bot]
184b494fc3
fix(deps): update module github.com/go-playground/validator/v10 to v10.12.0 (#357)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:43:56 -08:00
renovate[bot]
5a3fa23332
fix(deps): update module github.com/swaggo/http-swagger to v1.3.4 (#356)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:43:49 -08:00
renovate[bot]
ef0690d511
chore(deps): update actions/setup-go action to v4 (#351)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:43:33 -08:00
renovate[bot]
9e55c880f6
chore(deps): update dependency @types/dompurify to v3 (#346)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:43:04 -08:00
renovate[bot]
cb9b20e2d2
fix(deps): update module github.com/ardanlabs/conf/v3 to v3.1.5 (#341)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:42:49 -08:00
renovate[bot]
a79e780b4e
chore(deps): update dependency mkdocs-material to v9.1.3 (#340)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:42:38 -08:00
renovate[bot]
90cbb9bfd1
fix(deps): update module ariga.io/atlas to v0.10.0 (#327)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-20 18:42:25 -08:00
Vitalij Dovhanyc
dc08dbbd7a
feat: add czech currency (#323) 2023-03-20 18:42:11 -08:00
renovate[bot]
1f47d96e4c
chore(deps): update dependency nuxt to v3.2.3 (#326)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-07 10:12:32 -09:00
Hayden
23b5892aef
feat: Notifiers CRUD (#337)
* introduce scaffold for new models

* wip: shoutrrr wrapper (may remove)

* update schema files

* gen: ent code

* gen: migrations

* go mod tidy

* add group_id to notifier

* db migration

* new mapper helpers

* notifier repo

* introduce experimental adapter pattern for handlers

* refactor adapters to fit more common use cases

* new routes for notifiers

* update errors to fix validation panic

* go tidy

* reverse checkbox label display

* wip: notifiers UI

* use badges instead of text

* improve documentation

* add scaffold schema reference

* remove notifier service

* refactor schema folder

* support group edges via scaffold

* delete test file

* include link to API docs

* audit and update documentation + improve format

* refactor schema edges

* refactor

* add custom validator

* set validate + order fields by name

* fix failing tests
2023-03-06 21:18:58 -09:00
renovate[bot]
2665b666f1
fix(deps): update github.com/gocarina/gocsv digest to 70c27cb (#308)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-06 10:09:17 -09:00
renovate[bot]
14315cb88a
fix(deps): update module golang.org/x/crypto to v0.7.0 (#336)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-05 16:49:16 -09:00
Hayden
cf536393f5
fix datetime display issues (again) (#324) 2023-02-27 19:52:56 -09:00
Hayden
025521431e
feat: add scheduled maintenance tasks (#320)
* add scheduled maintenance tasks

* fix failing typecheck
2023-02-26 18:42:23 -09:00
Hayden
70297b9d27
feat: more-currency-support (#316)
* add polish and turkish lira

* add romanian lei

* code-gen
2023-02-25 18:13:52 -09:00
Hayden
729293745f
fix: table row background (#315) 2023-02-25 18:07:03 -09:00
Hayden
a6bcb36c5b
feat: import export rewrite (#290)
* WIP: initial work

* refactoring

* fix failing JS tests

* update import docs

* fix import headers

* fix column headers

* update refs on import

* remove demo status

* finnnneeeee

* formatting
2023-02-25 17:54:40 -09:00
Adrian
a005fa5b9b
feat: add currency swiss francs (#311) 2023-02-25 10:46:27 -09:00
renovate[bot]
a2dfa9dcef
chore(deps): update dependency vitest to ^0.29.0 (#312)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-25 10:45:56 -09:00
renovate[bot]
32216f63bd
fix(deps): update module github.com/stretchr/testify to v1.8.2 (#313)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-25 10:45:35 -09:00
renovate[bot]
ef190e26df
fix(deps): update github.com/gocarina/gocsv digest to bcce7dc (#307)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-19 18:04:36 -09:00
Hayden
c3e3702a7e
refactor: change icon for locations to tree view 2023-02-18 10:56:28 -09:00
Hayden
fb57120067
feat: support item nesting in tree view (#306) 2023-02-18 10:55:14 -09:00
Hayden
9d9b05d8a6
fix: several layout issues (#305)
* fix login version issue

* allow wrapping on action menu
2023-02-18 10:09:19 -09:00
Hayden
3ac6c7c858
fix: use item quantity as count mechanism (#304) 2023-02-18 09:57:09 -09:00
Hayden
859d3b9ffe
fix label store (#303) 2023-02-18 09:47:04 -09:00
renovate[bot]
6cfa6c9fc8
chore(deps): update dependency nuxt to v3.2.2 (#298)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-17 21:58:09 -09:00
Hayden
12975ce26e
feat: change auth to use cookies (#301)
* frontend cookie implementation

* accept cookies for authentication

* remove auth store

* add self attr
2023-02-17 21:57:21 -09:00
Hayden
bd321af29f
chore: developer cleanup (#300)
* new PR tasks

* add homebox to known words

* formatting

* bump deps

* generate db models

* ts errors

* drop id

* fix accessor

* drop unused time field

* change CI

* add expected error

* add type check

* resolve several type errors

* hoise in CI
2023-02-17 21:41:01 -09:00
renovate[bot]
88f9ff90d4
fix(deps): update module entgo.io/ent to v0.11.8 (#272)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:43:44 -09:00
renovate[bot]
354f1adbee
chore(deps): update dependency nuxt to v3.2.0 (#259)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:43:22 -09:00
renovate[bot]
2a62a43493
fix(deps): update dependency dompurify to v3 (#277)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:42:48 -09:00
Kyle Brown
673db41f37
build(docker): add image source label (#288)
This adds the `org.opencontainers.image.source` label pointing to this repository for cross-referencing information. For example: https://github.com/renovatebot/renovate/blob/main/lib/modules/datasource/docker/readme.md
2023-02-16 10:36:40 -09:00
renovate[bot]
830ce2b0a9
fix(deps): update module github.com/ardanlabs/conf/v3 to v3.1.4 (#291)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:35:54 -09:00
Hayden
e8e6a425dd
fix: button display on light mode (#293) 2023-02-16 10:28:52 -09:00
Hayden
da00db0608
fix: code generation and type processing (#292)
regular expressions are order specific and when applied in a random order you can get a variety of outputs. Using a list preserves order and ensures that the data-contracts.ts file is deterministic.
2023-02-16 10:13:09 -09:00
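As an aside, a minimal Go sketch of the idea behind this fix is included below: replacements applied from an ordered slice, rather than an unordered map, produce the same output on every run. The type names and patterns here are hypothetical, not the project's actual typegen rules.

```go
package main

import (
	"fmt"
	"regexp"
)

// replacement pairs a compiled pattern with its substitution.
type replacement struct {
	pattern *regexp.Regexp
	repl    string
}

// rules is an ordered slice, so the replacements run in the same order on
// every invocation; ranging over a map instead would randomize the order and
// the generated file along with it. The patterns below are invented examples.
var rules = []replacement{
	{regexp.MustCompile(`PaginationResultRepoItemSummary`), "PaginationResult<ItemSummary>"},
	{regexp.MustCompile(`RepoItemSummary`), "ItemSummary"},
}

func process(src string) string {
	for _, r := range rules {
		src = r.pattern.ReplaceAllString(src, r.repl)
	}
	return src
}

func main() {
	// Order matters: running the second rule first would mangle the longer name.
	fmt.Println(process("PaginationResultRepoItemSummary, RepoItemSummary"))
	// Prints: PaginationResult<ItemSummary>, ItemSummary
}
```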
Hayden
efd7069fe4
feat: hide registration button when disabled (#287)
* add allow registration to API Summary

* code gen

* use env for troubleshooting

* disable registration toggle based on backend
2023-02-15 08:58:38 -09:00
Hayden
dd349aa98e
fix #285 (#286) 2023-02-15 08:52:13 -09:00
Hayden
607b06d2f2
fix: date and datetime regression (#282)
* use custom types.Date implementation

* fix user registration bug

* remove sanity check

* fix datetime bug
2023-02-15 08:40:35 -09:00
Hayden
44f13f751a
docs: update label generator docs 2023-02-13 10:50:49 -09:00
Hayden
986d2c586e
refactor: editor page (#276) 2023-02-13 10:43:09 -09:00
Hayden
9361997a42
feat(reporting): bill of materials (#275)
* new reporting service

* API route

* code gen

* get tsv export from tools page

* fix naming
2023-02-13 10:00:29 -09:00
Hayden
2e96d8c4c2
fix favicon error 2023-02-12 15:14:11 -09:00
Hayden
ff75daf6b3
feat: mvp for label generation/printing (#274)
* initial label generator for QR codes

* use dynamic URL parameter
2023-02-12 15:09:31 -09:00
renovate[bot]
c0953bbd26
fix(deps): update module github.com/go-playground/validator/v10 to v10.11.2 (#253)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-10 19:41:39 -09:00
renovate[bot]
1f7e917c70
fix(deps): update module golang.org/x/crypto to v0.6.0 (#267)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-10 19:41:29 -09:00
Hayden
6ff2d64996
feat: init tools page (#271) 2023-02-10 19:38:50 -09:00
Hayden
ab22ea6a25
feat: rebuild search UI w/ new filters (#269) 2023-02-09 17:47:41 -09:00
Hayden
ce2fc7712a
fix: add custom action for fixing broken date/times (#268) 2023-02-08 17:59:04 -09:00
Hayden
bd933af874
feat: implement selectable view + sortable table (#264) 2023-02-05 14:00:33 -09:00
Hayden
f36f17b57d
feat: toggle view of password field (#263) 2023-02-05 12:56:47 -09:00
Hayden
504569bed0
fix: prevents re-creating locations and labels when someone joins group (#262)
* closes #258

* remove debug statement
2023-02-05 12:25:30 -09:00
Hayden
bd06fdafaf
feat: enhanced search functions (#260)
* make login case insensitive

* expand query to support by Field and By AID search

* type generation

* new API callers

* rework search to support field queries

* improve unnecessary data fetches

* clear stores on logout

* change verbiage

* add labels
2023-02-05 12:12:54 -09:00
Hayden
7b28973c60
fix: tree fixes (#252)
* use case insensitive sort

* support new location selector in create item

* fix incorrect date-time parsing logic
2023-01-29 13:20:18 -09:00
Hayden
cbac17c059
feat: use native date picker + align date formats (#251)
* use native date picker

* use YYYY-MM-DD for date formats
2023-01-28 13:32:39 -09:00
Hayden
6ed1f3695a
chore: upgrade deps + code-gen (#249) 2023-01-28 12:03:51 -09:00
Hayden
3d295b5132
feat: locations tree viewer (#248)
* location tree API

* test fixes

* initial tree location elements

* locations tree page

* update meta-data

* code-gen

* store item display preferences

* introduce basic table/card view elements

* codegen

* set parent location during location creation

* add item support for tree query

* refactor tree view

* wip: location selector improvements

* type gen

* rename items -> search

* remove various log statements

* fix markdown rendering for description

* update location selectors

* fix tests

* fix currency tests

* formatting
2023-01-28 11:53:00 -09:00
renovate[bot]
4d220cdd9c
chore(deps): update dependency vitest to ^0.28.0 (#244)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-26 10:05:09 -09:00
Serios
ad20e4e39b
Adding new currency (#243)
Adding the Bulgarian lev to the list of currencies
2023-01-26 08:59:07 -09:00
renovate[bot]
3e2f6a96bf
chore(deps): update dependency nuxt to v3.1.1 (#245)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-26 08:58:21 -09:00
renovate[bot]
2ece97c42a
fix(deps): update module github.com/swaggo/swag to v1.8.10 (#246)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-26 08:57:54 -09:00
renovate[bot]
a8ee4d421b
fix(deps): update module github.com/rs/zerolog to v1.29.0 (#247)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-26 08:57:45 -09:00
Hayden
91d0c588d9
refactor: refactor item page UI (#235)
* fix generated types

* fix tailwind auto-complete

* force lowercase buttons

* add title and change style for items page

* add copy button support for item details

* empty state for log

* fix duplicate padding

* add option for create without closing the current dialog.

* hide purchase price if not set

* invert toggle for edit mode

* update styles on item cards

* add edit support for maintenance logs
2023-01-21 21:15:23 -09:00
Hayden
c19fe94c08
feat: QR Codes (#226)
* code gen updates

* qrcode support

* remove opacity on toast

* update item view to use tab-like pages

* adjust view for cards

* fix old API calls for ioutils

* move embed

* extract QR code

* add docs for QR codes

* add QR code
2023-01-18 20:44:06 -09:00
renovate[bot]
f532b39c46
chore(deps): update docker/setup-qemu-action action to v2 (#219)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:56:03 -09:00
renovate[bot]
243742a75c
chore(deps): update docker/setup-buildx-action action to v2 (#218)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:55:56 -09:00
renovate[bot]
2b7c1fa429
chore(deps): update dependency @nuxtjs/eslint-config-typescript to v12 (#217)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:55:48 -09:00
renovate[bot]
125b8f5ecd
chore(deps): update actions/setup-go action to v3 (#216)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:55:40 -09:00
renovate[bot]
580de45cec
chore(deps): update actions/checkout action to v3 (#215)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:55:32 -09:00
renovate[bot]
da52b4ec64
chore(deps): update dependency vitest to ^0.27.0 (#214)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:55:23 -09:00
renovate[bot]
5b0dba854d
fix(deps): update dependency mkdocs-material to v9 (#222)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-15 13:54:33 -09:00
renovate[bot]
984c1a08ac
fix(deps): update module ariga.io/atlas to v0.9.0 (#213)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-14 09:51:47 -09:00
renovate[bot]
8419a17109
fix(deps): update module entgo.io/ent to v0.11.5 (#211)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-14 09:40:20 -09:00
renovate[bot]
f3406eec19
chore(deps): update pnpm/action-setup action to v2.2.4 (#210)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-14 09:40:14 -09:00
renovate[bot]
3ed07a909f
Add renovate.json (#209)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-14 09:28:52 -09:00
Hayden
07441eec8e
feat: add lookup by asset ID (#208)
* add asset id redirecting

* dev env changes

* suggested changes from PR

* remove unnecessary proxy from nuxt config

* fix formatting

* change directory reference

* fix API key storage

* use /a/{id} as redirect

* run generators

* remove dependabot

Co-authored-by: Bradley Nelson <bradley@nel.family>
Co-authored-by: Bradley Nelson <BCNelson@users.noreply.github.com>
2023-01-14 09:24:11 -09:00
dependabot[bot]
c78f10ba5d
fix(deps): bump golang.org/x/crypto from 0.4.0 to 0.5.0 in /backend (#199)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.4.0 to 0.5.0.
- [Release notes](https://github.com/golang/crypto/releases)
- [Commits](https://github.com/golang/crypto/compare/v0.4.0...v0.5.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-01-08 10:02:14 -09:00
Hayden
490a0ece86
chore: rewrite python script in go (#201) 2023-01-06 22:15:59 -09:00
Hayden
891d41b75f
feat: new-card-design (#196)
* card option 1

* UI updates for item card

* fix test error

* fix pagination issues on backend

* add integer support

* remove date from cards

* implement pagination for search page

* resolve search state problems

* other fixes

* fix broken datetime

* attempt to fix scroll behavior
2023-01-01 12:50:48 -09:00
Hayden
58d6f9a28c
Fix/mobile-layouts (#192)
* partial fix for location card spacing

* update header on mobile
2022-12-29 20:18:49 -09:00
Hayden
6a8a25e3f8
feat: new dashboard implementation (#168)
* wip: charts.js experimental work

* update lock file

* wip: frontend redesign

* wip: more UI fixes for consistency across themes

* cleanup

* improve UI log

* style updates

* fix lint errors
2022-12-29 16:19:15 -09:00
dependabot[bot]
a3954dab0f
fix(deps): bump golang.org/x/crypto from 0.3.0 to 0.4.0 in /backend (#174)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.3.0 to 0.4.0.
- [Release notes](https://github.com/golang/crypto/releases)
- [Commits](https://github.com/golang/crypto/compare/v0.3.0...v0.4.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-12-15 22:36:22 -09:00
dependabot[bot]
08d483d907
fix(deps): bump github.com/go-chi/chi/v5 from 5.0.7 to 5.0.8 in /backend (#172)
Bumps [github.com/go-chi/chi/v5](https://github.com/go-chi/chi) from 5.0.7 to 5.0.8.
- [Release notes](https://github.com/go-chi/chi/releases)
- [Changelog](https://github.com/go-chi/chi/blob/master/CHANGELOG.md)
- [Commits](https://github.com/go-chi/chi/compare/v5.0.7...v5.0.8)

---
updated-dependencies:
- dependency-name: github.com/go-chi/chi/v5
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-12-15 22:36:12 -09:00
dependabot[bot]
a53f89ceac
fix(deps): bump github.com/swaggo/swag from 1.8.8 to 1.8.9 in /backend (#178)
Bumps [github.com/swaggo/swag](https://github.com/swaggo/swag) from 1.8.8 to 1.8.9.
- [Release notes](https://github.com/swaggo/swag/releases)
- [Changelog](https://github.com/swaggo/swag/blob/master/.goreleaser.yml)
- [Commits](https://github.com/swaggo/swag/compare/v1.8.8...v1.8.9)

---
updated-dependencies:
- dependency-name: github.com/swaggo/swag
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-12-15 18:21:12 -09:00
Hayden
5bbb969763
feat: maintenance log (#170)
* remove repo for document tokens

* remove schema for doc tokens

* fix id template and generate cmd

* schema updates

* code gen

* bump dependencies

* fix broken migrations + add maintenance entry type

* spelling

* remove debug logger

* implement repository layer

* routes

* API client

* wip: maintenance log

* remove deprecated call
2022-12-09 20:57:57 -09:00
Hayden
d6da63187b
feat: new homepage statistic API's (#167)
* add date format and orDefault helpers

* introduce new statistics calculations queries

* rework statistics endpoints

* code generation

* fix styles on photo card

* label and location aggregation endpoints

* code-gen

* cleanup parser and defaults

* remove debug point

* setup E2E Testing

* linters

* formatting

* fmt plus name support on time series data

* code gen
2022-12-05 12:36:32 -09:00
Hayden
de419dc37d
feat: auth-roles, image-gallery, click-to-open (#166)
* schema changes

* db generate

* db migration

* add role based middleware

* implement attachment token access

* generate docs

* implement role based auth

* replace attachment specific tokens with gen token

* run linter

* cleanup temporary token implementation
2022-12-03 10:55:00 -09:00
Hayden
974d6914a2
feat: markdown support (#165)
* initial markdown support via markdown-it

* sanitize markup

* remove pre-padding

* fix linter errors
2022-12-02 16:12:32 -09:00
Steven Nguyen
d2aa022347
Fix typo in documentation (import-csv.md) (#163)
* Fix typo in .csv header snippet

Import RefLocation -> ImportRef Location

* Convert space to tab in header snippet
2022-12-02 15:02:59 -09:00
Hayden
6af048dc93
feat: present loc/labels based on route (#162) 2022-12-01 18:21:49 -09:00
Hayden
73c42f4784
fix: assign extension if not set (#161) 2022-12-01 18:10:50 -09:00
Hayden
f42a917390
feat: add tsv support for import files (#160)
* feat: add tsv support for import files

* add note in docs
2022-12-01 18:06:47 -09:00
Hayden
f149c3e4ab
feat: include notes in search (#159) 2022-12-01 17:32:11 -09:00
Hayden
1dc1ee54e2
feat: chinese-currency (#158)
* run updated code gen

* feat: add RMB currency support
2022-12-01 16:45:28 -09:00
dependabot[bot]
e8f215ce34
fix(deps): bump github.com/swaggo/swag from 1.8.7 to 1.8.8 in /backend (#153)
Bumps [github.com/swaggo/swag](https://github.com/swaggo/swag) from 1.8.7 to 1.8.8.
- [Release notes](https://github.com/swaggo/swag/releases)
- [Changelog](https://github.com/swaggo/swag/blob/master/.goreleaser.yml)
- [Commits](https://github.com/swaggo/swag/compare/v1.8.7...v1.8.8)

---
updated-dependencies:
- dependency-name: github.com/swaggo/swag
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-11-24 14:29:43 -09:00
dependabot[bot]
1aafbcd201
fix(deps): bump entgo.io/ent from 0.11.3 to 0.11.4 in /backend (#117)
Bumps [entgo.io/ent](https://github.com/ent/ent) from 0.11.3 to 0.11.4.
- [Release notes](https://github.com/ent/ent/releases)
- [Commits](https://github.com/ent/ent/compare/v0.11.3...v0.11.4)

---
updated-dependencies:
- dependency-name: entgo.io/ent
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-11-24 14:29:36 -09:00
Hayden
6dc2ae1bea
feat: asset tags/ids (#142)
* add schema

* run db migration

* bulk seed asset IDs

* breaking: update runtime options

* conditionally increment asset IDs

* update API endpoints

* fix import asset id assignment

* refactor display + marshal/unmarshal

* add docs page

* add to form field

* hide 000-000 values

* update ENV vars
2022-11-13 14:17:55 -09:00
Hayden
976f68252d
feat: add INR currency and sort currencies (#141) 2022-11-12 14:54:24 -09:00
Hayden
8e1947d971
fix: conditionally filter parent locations (#133) 2022-11-02 11:54:43 -08:00
430 changed files with 56301 additions and 24419 deletions

View file

@ -35,6 +35,6 @@
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node",
"features": {
"golang": "1.19"
"golang": "1.21"
}
}

View file

@ -22,3 +22,4 @@
**/secrets.dev.yaml
**/values.dev.yaml
README.md
!Dockerfile.rootless

View file

@ -1,31 +0,0 @@
version: 2
updates:
# Fetch and update latest `npm` packages
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
time: "00:00"
open-pull-requests-limit: 10
reviewers:
- hay-kot
assignees:
- hay-kot
commit-message:
prefix: fix
prefix-development: chore
include: scope
- package-ecosystem: gomod
directory: backend
schedule:
interval: daily
time: "00:00"
open-pull-requests-limit: 10
reviewers:
- hay-kot
assignees:
- hay-kot
commit-message:
prefix: fix
prefix-development: chore
include: scope

View file

@ -7,12 +7,12 @@ jobs:
Go:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v2
uses: actions/setup-go@v5
with:
go-version: 1.19
go-version: "1.21"
- name: Install Task
uses: arduino/setup-task@v1
@ -20,7 +20,7 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
uses: golangci/golangci-lint-action@v4
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest

View file

@ -4,11 +4,37 @@ on:
workflow_call:
jobs:
Frontend:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: pnpm/action-setup@v3.0.0
with:
version: 6.0.2
- name: Install dependencies
run: pnpm install --shamefully-hoist
working-directory: frontend
- name: Run Lint
run: pnpm run lint:ci
working-directory: frontend
- name: Run Typecheck
run: pnpm run typecheck
working-directory: frontend
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
@ -18,15 +44,15 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Go
uses: actions/setup-go@v2
uses: actions/setup-go@v5
with:
go-version: 1.19
go-version: "1.21"
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: 18
- uses: pnpm/action-setup@v2.2.2
- uses: pnpm/action-setup@v3.0.0
with:
version: 6.0.2
@ -34,9 +60,5 @@ jobs:
run: pnpm install
working-directory: frontend
- name: Run linter 👀
run: pnpm lint
working-directory: "frontend"
- name: Run Integration Tests
run: task test:ci

View file

@ -20,22 +20,22 @@ jobs:
name: "Publish Homebox"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v2
uses: actions/setup-go@v5
with:
go-version: 1.19
go-version: "1.20"
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: install buildx
id: buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3
with:
install: true
@ -44,7 +44,7 @@ jobs:
env:
CR_PAT: ${{ secrets.GH_TOKEN }}
- name: build nightly the image
- name: build nightly image
if: ${{ inputs.release == false }}
run: |
docker build --push --no-cache \
@ -53,6 +53,16 @@ jobs:
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
- name: build nightly-rootless image
if: ${{ inputs.release == false }}
run: |
docker build --push --no-cache \
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
--build-arg=COMMIT=$(git rev-parse HEAD) \
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--file Dockerfile.rootless \
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
- name: build release tagged the image
if: ${{ inputs.release == true }}
run: |
@ -64,3 +74,16 @@ jobs:
--build-arg COMMIT=$(git rev-parse HEAD) \
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--platform linux/amd64,linux/arm64,linux/arm/v7 .
- name: build release tagged the rootless image
if: ${{ inputs.release == true }}
run: |
docker build --push --no-cache \
--tag ghcr.io/hay-kot/homebox:nightly-rootless \
--tag ghcr.io/hay-kot/homebox:latest-rootless \
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
--build-arg VERSION=${{ inputs.tag }} \
--build-arg COMMIT=$(git rev-parse HEAD) \
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--platform linux/amd64,linux/arm64,linux/arm/v7 \
--file Dockerfile.rootless .

View file

@ -1,73 +1,29 @@
name: Build Nightly
name: Publish Dockers
on:
push:
branches:
- main
release:
types:
- published
env:
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
jobs:
backend-tests:
name: "Backend Server Tests"
uses: hay-kot/homebox/.github/workflows/partial-backend.yaml@main
frontend-tests:
name: "Frontend and End-to-End Tests"
uses: hay-kot/homebox/.github/workflows/partial-frontend.yaml@main
deploy:
name: "Deploy Nightly to Fly.io"
runs-on: ubuntu-latest
needs:
- backend-tests
- frontend-tests
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: superfly/flyctl-actions/setup-flyctl@master
- run: flyctl deploy --remote-only
publish-nightly:
name: "Publish Nightly"
if: github.event_name != 'release'
needs:
- backend-tests
- frontend-tests
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
with:
tag: nightly
secrets:
GH_TOKEN: ${{ secrets.CR_PAT }}
publish-tag:
name: "Publish Tag"
if: github.event_name == 'release'
needs:
- backend-tests
- frontend-tests
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
with:
release: true
tag: ${{ github.event.release.tag_name }}
secrets:
GH_TOKEN: ${{ secrets.CR_PAT }}
deploy-docs:
name: Deploy docs
needs:
- publish-tag
runs-on: ubuntu-latest
steps:
- name: Checkout main
uses: actions/checkout@v2
- name: Deploy docs
uses: mhausenblas/mkdocs-deploy-gh-pages@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CONFIG_FILE: docs/mkdocs.yml
EXTRA_PACKAGES: build-base

View file

@ -12,4 +12,4 @@ jobs:
frontend-tests:
name: "Frontend and End-to-End Tests"
uses: ./.github/workflows/partial-frontend.yaml
uses: ./.github/workflows/partial-frontend.yaml

77
.github/workflows/tag.yaml vendored Normal file
View file

@ -0,0 +1,77 @@
name: Publish Release
on:
push:
tags:
- v*
env:
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
jobs:
backend-tests:
name: "Backend Server Tests"
uses: hay-kot/homebox/.github/workflows/partial-backend.yaml@main
frontend-tests:
name: "Frontend and End-to-End Tests"
uses: hay-kot/homebox/.github/workflows/partial-frontend.yaml@main
goreleaser:
name: goreleaser
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
- uses: pnpm/action-setup@v2
with:
version: 7.30.1
- name: Build Frontend and Copy to Backend
working-directory: frontend
run: |
pnpm install --shamefully-hoist
pnpm run build
cp -r ./.output/public ../backend/app/api/static/
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v5
with:
workdir: "backend"
distribution: goreleaser
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
publish-tag:
name: "Publish Tag"
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
with:
release: true
tag: ${{ github.ref_name }}
secrets:
GH_TOKEN: ${{ secrets.CR_PAT }}
deploy-docs:
name: Deploy docs
needs:
- publish-tag
- goreleaser
runs-on: ubuntu-latest
steps:
- name: Checkout main
uses: actions/checkout@v4
- name: Deploy docs
uses: mhausenblas/mkdocs-deploy-gh-pages@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CONFIG_FILE: docs/mkdocs.yml
EXTRA_PACKAGES: build-base

10
.gitignore vendored
View file

@ -45,3 +45,13 @@ node_modules
.output
.env
dist
.pnpm-store
backend/app/api/app
backend/app/api/__debug_bin
dist/
# Nuxt Publish Dir
backend/app/api/static/public/*
!backend/app/api/static/public/.gitkeep
backend/api

View file

@ -0,0 +1,33 @@
---
# yaml-language-server: $schema=https://hay-kot.github.io/scaffold/schema.json
messages:
pre: |
# Ent Model Generation
With Boilerplate!
post: |
Complete!
questions:
- name: "model"
prompt:
message: "What is the name of the model? (PascalCase)"
required: true
- name: "by_group"
prompt:
confirm: "Include a Group Edge? (group_id -> id)"
required: true
rewrites:
- from: 'templates/model.go'
to: 'backend/internal/data/ent/schema/{{ lower .Scaffold.model }}.go'
inject:
- name: "Insert Groups Edge"
path: 'backend/internal/data/ent/schema/group.go'
at: // $scaffold_edge
template: |
{{- if .Scaffold.by_group -}}
owned("{{ lower .Scaffold.model }}s", {{ .Scaffold.model }}.Type),
{{- end -}}

View file

@ -0,0 +1,40 @@
package schema
import (
"entgo.io/ent"
"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
)
type {{ .Scaffold.model }} struct {
ent.Schema
}
func ({{ .Scaffold.model }}) Mixin() []ent.Mixin {
return []ent.Mixin{
mixins.BaseMixin{},
{{- if .Scaffold.by_group }}
GroupMixin{ref: "{{ snakecase .Scaffold.model }}s"},
{{- end }}
}
}
// Fields of the {{ .Scaffold.model }}.
func ({{ .Scaffold.model }}) Fields() []ent.Field {
return []ent.Field{
// field.String("name").
}
}
// Edges of the {{ .Scaffold.model }}.
func ({{ .Scaffold.model }}) Edges() []ent.Edge {
return []ent.Edge{
// edge.From("group", Group.Type).
}
}
func ({{ .Scaffold.model }}) Indexes() []ent.Index {
return []ent.Index{
// index.Fields("token"),
}
}

47
.vscode/launch.json vendored Normal file
View file

@ -0,0 +1,47 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"compounds": [
{
"name": "Full Stack",
"configurations": ["Launch Backend", "Launch Frontend"],
"stopAll": true
}
],
"configurations": [
{
"name": "Launch Backend",
"type": "go",
"request": "launch",
"mode": "debug",
"program": "${workspaceRoot}/backend/app/api/",
"args": [],
"env": {
"HBOX_DEMO": "true",
"HBOX_LOG_LEVEL": "debug",
"HBOX_DEBUG_ENABLED": "true",
"HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data",
"HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1"
},
},
{
"name": "Launch Frontend",
"type": "node",
"request": "launch",
"runtimeExecutable": "pnpm",
"runtimeArgs": [
"run",
"dev"
],
"cwd": "${workspaceFolder}/frontend",
"serverReadyAction": {
"action": "debugWithChrome",
"pattern": "Local: http://localhost:([0-9]+)",
"uriFormat": "http://localhost:%s",
"webRoot": "${workspaceFolder}/frontend"
}
}
]
}

27
.vscode/settings.json vendored
View file

@ -1,7 +1,4 @@
{
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
},
"yaml.schemas": {
"https://squidfunk.github.io/mkdocs-material/schema.json": "mkdocs.yml"
},
@ -12,6 +9,26 @@
"README.md": "LICENSE, SECURITY.md"
},
"cSpell.words": [
"debughandlers"
]
"debughandlers",
"Homebox"
],
// use ESLint to format code on save
"editor.formatOnSave": false,
"editor.defaultFormatter": "dbaeumer.vscode-eslint",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
},
"[typescript]": {
"editor.defaultFormatter": "dbaeumer.vscode-eslint"
},
"eslint.format.enable": true,
"css.validate": false,
"tailwindCSS.includeLanguages": {
"vue": "html",
"vue-html": "html"
},
"editor.quickSuggestions": {
"strings": true
},
"tailwindCSS.experimental.configFile": "./frontend/tailwind.config.js"
}

View file

@ -1,16 +1,16 @@
# Contributing
## We Develop with Github
## We Develop with GitHub
We use github to host code, to track issues and feature requests, as well as accept pull requests.
We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.
## Branch Flow
We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request you can use the following steps:
We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request, you can use the following steps:
1. Fork the repository and create a new branch from `main`.
2. If you've added code that should be tested, add tests.
3. If you've changed API's, update the documentation.
3. If you've changed APIs, update the documentation.
4. Ensure that the test suite and linters pass
5. Issue your pull request
@ -18,7 +18,7 @@ We use the `main` branch as the development branch. All PRs should be made to th
### Prerequisites
There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you can need to ensure that you have the following tools installed:
There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you need to ensure that you have the following tools installed:
- [Go 1.19+](https://golang.org/doc/install)
- [Swaggo](https://github.com/swaggo/swag)
@ -31,21 +31,27 @@ If you're using `taskfile` you can run `task --list-all` for a list of all comma
### Setup
If you're using the taskfile you can use the `task setup` command to run the required setup commands. Otherwise you can review the commands required in the `Taskfile.yml` file.
If you're using the taskfile, you can use the `task setup` command to run the required setup commands. Otherwise, you can review the commands required in the `Taskfile.yml` file.
Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag you will get an error when running the the frontend server.
Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag, you will get an error when running the frontend server.
### API Development Notes
start command `task go:run`
1. API Server does not auto reload. You'll need to restart the server after making changes.
2. Unit tests should be written in Go, however end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
2. Unit tests should be written in Go, however, end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
### Frontend Development Notes
start command `task ui:dev`
1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
2. We're using Vitest for our automated testing. you can run these with `task ui:watch`.
3. Tests require the API server to be running and in some cases the first run will fail due to a race condition. If this happens just run the tests again and they should pass.
2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
3. Tests require the API server to be running, and in some cases the first run will fail due to a race condition. If this happens, just run the tests again and they should pass.
## Publishing Release
Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.
Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo

View file

@ -1,49 +1,48 @@
# Build Nuxt
FROM node:17-alpine as frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
RUN pnpm build
# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
RUN apk update && \
apk upgrade && \
apk add --update git build-base gcc g++
WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=1 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api \
-v ./app/api/*.go
# Production Stage
FROM alpine:latest
ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
RUN apk --no-cache add ca-certificates
RUN mkdir /app
COPY --from=builder /go/bin/api /app
RUN chmod +x /app/api
LABEL Name=homebox Version=0.0.1
EXPOSE 7745
WORKDIR /app
VOLUME [ "/data" ]
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]
# Build Nuxt
FROM r.batts.cloud/nodejs:18 as frontend-builder
WORKDIR /app
RUN npm install -g pnpm@latest-9
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
RUN pnpm build
# Build API
FROM r.batts.cloud/golang:1.24 AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
RUN apt update && \
apt install -y git build-essential gcc g++
WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api \
-v ./app/api/*.go
# Production Stage
FROM r.batts.cloud/debian:trixie
ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1
RUN mkdir /app
COPY --from=builder /go/bin/api /app
RUN chmod +x /app/api
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
EXPOSE 7745
WORKDIR /app
VOLUME [ "/data" ]
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]

53
Dockerfile.rootless Normal file
View file

@ -0,0 +1,53 @@
# Build Nuxt
FROM node:17-alpine as frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
RUN pnpm build
# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
RUN apk update && \
apk upgrade && \
apk add --update git build-base gcc g++
WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api \
-v ./app/api/*.go && \
chmod +x /go/bin/api && \
# create a directory so that we can copy it in the next stage
mkdir /data
# Production Stage
FROM gcr.io/distroless/static
ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
# Copy the binary and the (empty) /data dir and
# change the ownership to the low-privileged user
COPY --from=builder --chown=nonroot /go/bin/api /app
COPY --from=builder --chown=nonroot /data /data
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
EXPOSE 7745
VOLUME [ "/data" ]
# Drop root and run as low-privileged user
USER nonroot
ENTRYPOINT [ "/app" ]
CMD [ "/data/config.yml" ]

View file

@ -16,12 +16,28 @@
[Configuration & Docker Compose](https://hay-kot.github.io/homebox/quick-start)
```bash
docker run --name=homebox \
--restart=always \
--publish=3100:7745 \
ghcr.io/hay-kot/homebox:latest
# If using the rootless image, ensure data
# folder has correct permissions
mkdir -p /path/to/data/folder
chown 65532:65532 -R /path/to/data/folder
docker run -d \
--name homebox \
--restart unless-stopped \
--publish 3100:7745 \
--env TZ=Europe/Bucharest \
--volume /path/to/data/folder/:/data \
ghcr.io/hay-kot/homebox:latest
# ghcr.io/hay-kot/homebox:latest-rootless
```
<!-- CONTRIBUTING -->
## Contributing
Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
If you are not a coder, you can still contribute financially. Financial contributions help me prioritize working on this project over others and help me know that there is a real demand for project development.
<a href="https://www.buymeacoffee.com/haykot" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-green.png" alt="Buy Me A Coffee" style="height: 30px !important;width: 107px !important;" ></a>
## Credits
- Logo by [@lakotelman](https://github.com/lakotelman)

View file

@ -1,7 +1,9 @@
version: "3"
env:
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_fk=1
HBOX_LOG_LEVEL: debug
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1
HBOX_OPTIONS_ALLOW_REGISTRATION: true
UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
tasks:
setup:
@ -11,61 +13,77 @@ tasks:
- cd backend && go mod tidy
- cd frontend && pnpm install --shamefully-hoist
generate:
desc: |
Generates collateral files from the backend project
including swagger docs and typescripts type for the frontend
deps:
- db:generate
swag:
desc: Generate swagger docs
dir: backend/app/api/static/
vars:
API: "../"
INTERNAL: "../../../internal"
PKGS: "../../../pkgs"
cmds:
- swag fmt --dir={{ .API }}
- swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency
sources:
- "./backend/app/api/**/*"
- "./backend/internal/data/**"
- "./backend/internal/core/services/**/*"
- "./backend/app/tools/typegen/main.go"
typescript-types:
desc: Generates typescript types from swagger definition
cmds:
- cd backend/app/api/static && swag fmt --dir=../
- cd backend/app/api/static && swag init --dir=../,../../../internal,../../../pkgs
- |
npx swagger-typescript-api \
--no-client \
--modular \
--path ./backend/app/api/static/docs/swagger.json \
--output ./frontend/lib/api/types
- python3 ./scripts/process-types.py ./frontend/lib/api/types/data-contracts.ts
- go run ./backend/app/tools/typegen/main.go ./frontend/lib/api/types/data-contracts.ts
sources:
- "./backend/app/api/**/*"
- "./backend/internal/data/**"
- "./backend/internal/services/**/*"
- "./scripts/process-types.py"
generates:
- "./frontend/lib/api/types/data-contracts.ts"
- "./backend/internal/data/ent/schema"
- "./backend/app/api/static/docs/swagger.json"
- "./backend/app/api/static/docs/swagger.yaml"
- ./backend/app/tools/typegen/main.go
- ./backend/app/api/static/docs/swagger.json
generate:
deps:
- db:generate
cmds:
- task: swag
- task: typescript-types
- cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
go:run:
desc: Starts the backend api server (depends on generate task)
dir: backend
deps:
- generate
cmds:
- cd backend && go run ./app/api/ {{ .CLI_ARGS }}
- go run ./app/api/ {{ .CLI_ARGS }}
silent: false
go:test:
desc: Runs all go tests using gotestsum - supports passing gotestsum args
dir: backend
cmds:
- cd backend && gotestsum {{ .CLI_ARGS }} ./...
- gotestsum {{ .CLI_ARGS }} ./...
go:coverage:
desc: Runs all go tests with -race flag and generates a coverage report
dir: backend
cmds:
- cd backend && go test -race -coverprofile=coverage.out -covermode=atomic ./app/... ./internal/... ./pkgs/... -v -cover
- go test -race -coverprofile=coverage.out -covermode=atomic ./app/... ./internal/... ./pkgs/... -v -cover
silent: true
go:tidy:
desc: Runs go mod tidy on the backend
dir: backend
cmds:
- cd backend && go mod tidy
- go mod tidy
go:lint:
desc: Runs golangci-lint
dir: backend
cmds:
- cd backend && golangci-lint run ./...
- golangci-lint run ./...
go:all:
desc: Runs all go test and lint related tasks
@ -76,19 +94,18 @@ tasks:
go:build:
desc: Builds the backend binary
dir: backend
cmds:
- cd backend && go build -o ../build/backend ./app/api
- go build -o ../build/backend ./app/api
db:generate:
desc: Run Entgo.io Code Generation
dir: backend/internal/
cmds:
- |
cd backend/internal/ && go generate ./... \
--template=./data/ent/schema/templates/has_id.tmpl
go generate ./...
sources:
- "./backend/internal/data/ent/schema/**/*"
generates:
- "./backend/internal/ent/"
db:migration:
desc: Runs the database diff engine to generate a SQL migration files
@ -99,13 +116,27 @@ tasks:
ui:watch:
desc: Starts the vitest test runner in watch mode
dir: frontend
cmds:
- cd frontend && pnpm run test:watch
- pnpm run test:watch
ui:dev:
desc: Run frontend development server
dir: frontend
cmds:
- cd frontend && pnpm dev
- pnpm dev
ui:fix:
desc: Runs prettier and eslint on the frontend
dir: frontend
cmds:
- pnpm run lint:fix
ui:check:
desc: Runs type checking
dir: frontend
cmds:
- pnpm run typecheck
test:ci:
desc: Runs end-to-end test on a live server (only for use in CI)
@ -115,3 +146,12 @@ tasks:
- sleep 5
- cd frontend && pnpm run test:ci
silent: true
pr:
desc: Runs all tasks required for a PR
cmds:
- task: generate
- task: go:all
- task: ui:check
- task: ui:fix
- task: test:ci

2
backend/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
dist/

74
backend/.golangci.yml Normal file
View file

@ -0,0 +1,74 @@
run:
timeout: 10m
skip-dirs:
- internal/data/ent.*
linters-settings:
goconst:
min-len: 5
min-occurrences: 5
exhaustive:
default-signifies-exhaustive: true
revive:
ignore-generated-header: false
severity: warning
confidence: 3
depguard:
rules:
main:
deny:
- pkg: io/util
desc: |
Deprecated: As of Go 1.16, the same functionality is now provided by
package io or package os, and those implementations should be
preferred in new code. See the specific function documentation for
details.
gocritic:
enabled-checks:
- ruleguard
testifylint:
enable-all: true
tagalign:
order:
- json
- schema
- yaml
- yml
- toml
- validate
linters:
disable-all: true
enable:
- asciicheck
- bodyclose
- depguard
- dogsled
- errcheck
- errorlint
- exhaustive
- exportloopref
- gochecknoinits
- goconst
- gocritic
- gocyclo
- gofmt
- goprintffuncname
- gosimple
- govet
- ineffassign
- misspell
- nakedret
- revive
- staticcheck
- stylecheck
- tagalign
- testifylint
- typecheck
- typecheck
- unconvert
- unused
- whitespace
- zerologlint
- sqlclosecheck
issues:
exclude-use-default: false
fix: true

54
backend/.goreleaser.yaml Normal file
View file

@ -0,0 +1,54 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
before:
hooks:
# you may remove this if you don't need go generate
- go generate ./...
builds:
- main: ./app/api
env:
- CGO_ENABLED=0
goos:
- linux
- windows
- darwin
goarch:
- amd64
- "386"
- arm
- arm64
ignore:
- goos: windows
goarch: arm
- goos: windows
goarch: "386"
archives:
- format: tar.gz
# this name template makes the OS and Arch compatible with the results of uname.
name_template: >-
{{ .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
# use zip for windows archives
format_overrides:
- goos: windows
format: zip
checksum:
name_template: 'checksums.txt'
snapshot:
name_template: "{{ incpatch .Version }}-next"
changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
# The lines beneath this are called `modelines`. See `:help modeline`
# Feel free to remove those if you don't want/use them.
# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
# vim: set ts=2 sw=2 tw=0 fo=cnqoj

View file

@ -1,23 +1,21 @@
package main
import (
"time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/pkgs/mailer"
"github.com/hay-kot/homebox/backend/pkgs/server"
)
type app struct {
conf *config.Config
mailer mailer.Mailer
db *ent.Client
server *server.Server
repos *repo.AllRepos
services *services.AllServices
bus *eventbus.EventBus
}
func new(conf *config.Config) *app {
@ -35,10 +33,3 @@ func new(conf *config.Config) *app {
return s
}
func (a *app) startBgTask(t time.Duration, fn func()) {
for {
a.server.Background(fn)
time.Sleep(t)
}
}

View file

@ -0,0 +1,37 @@
package main
import (
"context"
"time"
)
type BackgroundTask struct {
name string
Interval time.Duration
Fn func(context.Context)
}
func (tsk *BackgroundTask) Name() string {
return tsk.name
}
func NewTask(name string, interval time.Duration, fn func(context.Context)) *BackgroundTask {
return &BackgroundTask{
Interval: interval,
Fn: fn,
}
}
func (tsk *BackgroundTask) Start(ctx context.Context) error {
timer := time.NewTimer(tsk.Interval)
for {
select {
case <-ctx.Done():
return nil
case <-timer.C:
timer.Reset(tsk.Interval)
tsk.Fn(ctx)
}
}
}
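Below is a hedged usage sketch for the BackgroundTask type added in this diff. It assumes the type and its NewTask constructor are in scope in the same package; the task name, interval, and body are invented for illustration, not the application's actual startup wiring.

```go
package main

// Usage sketch only: assumes the BackgroundTask type and NewTask constructor
// from the diff above are in the same package. The task name, interval, and
// body are invented for illustration.

import (
	"context"
	"time"

	"github.com/rs/zerolog/log"
)

func main() {
	// Cancel the context to stop the task; here we simply stop after a while.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()

	tsk := NewTask("maintenance-notifier", time.Minute, func(ctx context.Context) {
		// Placeholder work; a real task would check for due maintenance entries.
		log.Info().Msg("running scheduled maintenance check")
	})

	// Start blocks until ctx is done, firing Fn once per Interval.
	if err := tsk.Start(ctx); err != nil {
		log.Err(err).Msg("background task stopped")
	}
}
```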

View file

@ -2,57 +2,61 @@ package main
import (
"context"
"encoding/csv"
"strings"
"time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/rs/zerolog/log"
)
func (a *app) SetupDemo() {
csvText := `Import Ref,Location,Labels,Quantity,Name,Description,Insured,Serial Number,Model Number,Manufacturer,Notes,Purchase From,Purchased Price,Purchased Time,Lifetime Warranty,Warranty Expires,Warranty Details,Sold To,Sold Price,Sold Time,Sold Notes
csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes
,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,,
,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Downstairs,IOT;Home Assistant; Z-Wave,1,Ecolink Z-Wave PIR Motion Sensor,"Ecolink Z-Wave PIR Motion Detector Pet Immune, White (PIRZWAVE2.5-ECO)",,,PIRZWAVE2.5-ECO,Ecolink,,Amazon,35.58,10/21/2020,,,,,,,
,Entry,IOT;Home Assistant; Z-Wave,1,Yale Security Touchscreen Deadbolt,"Yale Security YRD226-ZW2-619 YRD226ZW2619 Touchscreen Deadbolt, Satin Nickel",,,YRD226ZW2619,Yale,,Amazon,120.39,10/14/2020,,,,,,,
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
`
var (
registration = services.UserRegistration{
Email: "demo@example.com",
Name: "Demo",
Password: "demo",
}
)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
registration := services.UserRegistration{
Email: "demo@example.com",
Name: "Demo",
Password: "demo",
}
// First check if we've already setup a demo user and skip if so
_, err := a.services.User.Login(context.Background(), registration.Email, registration.Password)
log.Debug().Msg("Checking if demo user already exists")
_, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
if err == nil {
log.Info().Msg("Demo user already exists, skipping setup")
return
}
_, err = a.services.User.RegisterUser(context.Background(), registration)
log.Debug().Msg("Demo user does not exist, setting up demo")
_, err = a.services.User.RegisterUser(ctx, registration)
if err != nil {
log.Err(err).Msg("Failed to register demo user")
log.Fatal().Msg("Failed to setup demo")
}
token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password)
self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
// Read CSV Text
reader := csv.NewReader(strings.NewReader(csvText))
reader.Comma = ','
records, err := reader.ReadAll()
token, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
if err != nil {
log.Err(err).Msg("Failed to read CSV")
log.Err(err).Msg("Failed to login demo user")
log.Fatal().Msg("Failed to setup demo")
return
}
self, err := a.services.User.GetSelf(ctx, token.Raw)
if err != nil {
log.Err(err).Msg("Failed to get self")
log.Fatal().Msg("Failed to setup demo")
return
}
_, err = a.services.Items.CsvImport(context.Background(), self.GroupID, records)
_, err = a.services.Items.CsvImport(ctx, self.GroupID, strings.NewReader(csvText))
if err != nil {
log.Err(err).Msg("Failed to import CSV")
log.Fatal().Msg("Failed to setup demo")

View file

@ -1,3 +1,4 @@
// Package debughandlers provides handlers for debugging.
package debughandlers
import (

Binary file not shown (new image added, 4.5 KiB).

View file

@ -1,13 +1,38 @@
// Package v1 provides the API handlers for version 1 of the API.
package v1
import (
"encoding/json"
"net/http"
"time"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/olahol/melody"
)
type Results[T any] struct {
Items []T `json:"items"`
}
func WrapResults[T any](items []T) Results[T] {
return Results[T]{Items: items}
}
type Wrapped struct {
Item interface{} `json:"item"`
}
func Wrap(v any) Wrapped {
return Wrapped{Item: v}
}
func WithMaxUploadSize(maxUploadSize int64) func(*V1Controller) {
return func(ctrl *V1Controller) {
ctrl.maxUploadSize = maxUploadSize
@ -26,12 +51,20 @@ func WithRegistration(allowRegistration bool) func(*V1Controller) {
}
}
func WithSecureCookies(secure bool) func(*V1Controller) {
return func(ctrl *V1Controller) {
ctrl.cookieSecure = secure
}
}
type V1Controller struct {
cookieSecure bool
repo *repo.AllRepos
svc *services.AllServices
maxUploadSize int64
isDemo bool
allowRegistration bool
bus *eventbus.EventBus
}
type (
@ -43,27 +76,29 @@ type (
BuildTime string `json:"buildTime"`
}
ApiSummary struct {
Healthy bool `json:"health"`
Versions []string `json:"versions"`
Title string `json:"title"`
Message string `json:"message"`
Build Build `json:"build"`
Demo bool `json:"demo"`
APISummary struct {
Healthy bool `json:"health"`
Versions []string `json:"versions"`
Title string `json:"title"`
Message string `json:"message"`
Build Build `json:"build"`
Demo bool `json:"demo"`
AllowRegistration bool `json:"allowRegistration"`
}
)
func BaseUrlFunc(prefix string) func(s string) string {
func BaseURLFunc(prefix string) func(s string) string {
return func(s string) string {
return prefix + "/v1" + s
}
}
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, options ...func(*V1Controller)) *V1Controller {
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, options ...func(*V1Controller)) *V1Controller {
ctrl := &V1Controller{
repo: repos,
svc: svc,
allowRegistration: true,
bus: bus,
}
for _, opt := range options {
@ -74,19 +109,105 @@ func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, options ..
}
// HandleBase godoc
// @Summary Retrieves the basic information about the API
// @Tags Base
// @Produce json
// @Success 200 {object} ApiSummary
// @Router /v1/status [GET]
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) server.HandlerFunc {
//
// @Summary Application Info
// @Tags Base
// @Produce json
// @Success 200 {object} APISummary
// @Router /v1/status [GET]
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
return server.Respond(w, http.StatusOK, ApiSummary{
Healthy: ready(),
Title: "Go API Template",
Message: "Welcome to the Go API Template Application!",
Build: build,
Demo: ctrl.isDemo,
return server.JSON(w, http.StatusOK, APISummary{
Healthy: ready(),
Title: "Homebox",
Message: "Track, Manage, and Organize your Things",
Build: build,
Demo: ctrl.isDemo,
AllowRegistration: ctrl.allowRegistration,
})
}
}
// HandleCurrency godoc
//
// @Summary Currency
// @Tags Base
// @Produce json
// @Success 200 {object} currencies.Currency
// @Router /v1/currency [GET]
func (ctrl *V1Controller) HandleCurrency() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Set Cache for 10 Minutes
w.Header().Set("Cache-Control", "max-age=600")
return server.JSON(w, http.StatusOK, ctrl.svc.Currencies.Slice())
}
}
func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
type eventMsg struct {
Event string `json:"event"`
}
m := melody.New()
m.HandleConnect(func(s *melody.Session) {
auth := services.NewContext(s.Request.Context())
s.Set("gid", auth.GID)
})
factory := func(e string) func(data any) {
return func(data any) {
eventData, ok := data.(eventbus.GroupMutationEvent)
if !ok {
log.Log().Msgf("invalid event data: %v", data)
return
}
msg := &eventMsg{Event: e}
jsonBytes, err := json.Marshal(msg)
if err != nil {
log.Log().Msgf("error marshling event data %v: %v", data, err)
return
}
_ = m.BroadcastFilter(jsonBytes, func(s *melody.Session) bool {
groupIDStr, ok := s.Get("gid")
if !ok {
return false
}
GID := groupIDStr.(uuid.UUID)
return GID == eventData.GID
})
}
}
ctrl.bus.Subscribe(eventbus.EventLabelMutation, factory("label.mutation"))
ctrl.bus.Subscribe(eventbus.EventLocationMutation, factory("location.mutation"))
ctrl.bus.Subscribe(eventbus.EventItemMutation, factory("item.mutation"))
// Persistent asynchronous ticker that keeps all websocket connections alive with periodic pings.
go func() {
const interval = 10 * time.Second
ping := time.NewTicker(interval)
defer ping.Stop()
for range ping.C {
msg := &eventMsg{Event: "ping"}
pingBytes, err := json.Marshal(msg)
if err != nil {
log.Log().Msgf("error marshaling ping: %v", err)
} else {
_ = m.Broadcast(pingBytes)
}
}
}()
return func(w http.ResponseWriter, r *http.Request) error {
return m.HandleRequest(w, r)
}
}

View file

@ -21,7 +21,7 @@ func (ctrl *V1Controller) routeID(r *http.Request) (uuid.UUID, error) {
func (ctrl *V1Controller) routeUUID(r *http.Request, key string) (uuid.UUID, error) {
ID, err := uuid.Parse(chi.URLParam(r, key))
if err != nil {
return uuid.Nil, validate.NewInvalidRouteKeyError(key)
return uuid.Nil, validate.NewRouteKeyError(key)
}
return ID, nil
}

View file

@ -0,0 +1,36 @@
package v1
import (
"net/url"
"strconv"
"github.com/google/uuid"
)
func queryUUIDList(params url.Values, key string) []uuid.UUID {
var ids []uuid.UUID
for _, id := range params[key] {
uid, err := uuid.Parse(id)
if err != nil {
continue
}
ids = append(ids, uid)
}
return ids
}
func queryIntOrNegativeOne(s string) int {
i, err := strconv.Atoi(s)
if err != nil {
return -1
}
return i
}
func queryBool(s string) bool {
b, err := strconv.ParseBool(s)
if err != nil {
return false
}
return b
}
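A small usage sketch for the query helpers above follows. It assumes the helpers are in scope in the same package; the parameter names and values are invented for illustration rather than taken from the API's actual query contract.

```go
package main

// Usage sketch only: assumes queryUUIDList, queryIntOrNegativeOne, and
// queryBool from the diff above are in the same package; parameter names
// and values are invented for illustration.

import (
	"fmt"
	"net/url"
)

func main() {
	params, _ := url.ParseQuery(
		"labels=c6f54b64-8d1f-4f36-9e9a-1a2b3c4d5e6f&labels=not-a-uuid&page=2&archived=true")

	ids := queryUUIDList(params, "labels")            // malformed UUIDs are skipped
	page := queryIntOrNegativeOne(params.Get("page")) // -1 when missing or malformed
	archived := queryBool(params.Get("archived"))     // false when missing or malformed

	fmt.Println(ids, page, archived) // one UUID, 2, true
}
```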

View file

@ -0,0 +1,83 @@
package v1
import (
"context"
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
type ActionAmountResult struct {
Completed int `json:"completed"`
}
func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int, error)) errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
totalCompleted, err := fn(ctx, ctx.GID)
if err != nil {
log.Err(err).Str("action_ref", ref).Msg("failed to run action")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.JSON(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
}
}
// HandleEnsureAssetID godoc
//
// @Summary Ensure Asset IDs
// @Description Ensures all items in the database have an asset ID
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-asset-ids [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureAssetID() errchain.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
}
// HandleEnsureImportRefs godoc
//
// @Summary Ensures Import Refs
// @Description Ensures all items in the database have an import ref
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-import-refs [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc {
return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
}
// HandleItemDateZeroOut godoc
//
// @Summary Zero Out Time Fields
// @Description Resets all item date fields to the beginning of the day
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/zero-item-time-fields [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc {
return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
}
// HandleSetPrimaryPhotos godoc
//
// @Summary Set Primary Photos
// @Description Sets the first photo of each item as the primary photo
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/set-primary-photos [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos)
}


@@ -0,0 +1,62 @@
package v1
import (
"net/http"
"strconv"
"strings"
"github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
// HandleAssetGet godocs
//
// @Summary Get Item by Asset ID
// @Tags Items
// @Produce json
// @Param id path string true "Asset ID"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/assets/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
assetIDParam := chi.URLParam(r, "id")
assetIDParam = strings.ReplaceAll(assetIDParam, "-", "") // Remove dashes
// Convert the asset ID to an int64
assetID, err := strconv.ParseInt(assetIDParam, 10, 64)
if err != nil {
return err
}
pageParam := r.URL.Query().Get("page")
var page int64 = -1
if pageParam != "" {
page, err = strconv.ParseInt(pageParam, 10, 64)
if err != nil {
return server.JSON(w, http.StatusBadRequest, "Invalid page number")
}
}
pageSizeParam := r.URL.Query().Get("pageSize")
var pageSize int64 = -1
if pageSizeParam != "" {
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 64)
if err != nil {
return server.JSON(w, http.StatusBadRequest, "Invalid page size")
}
}
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetID), int(page), int(pageSize))
if err != nil {
log.Err(err).Msg("failed to get item")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.JSON(w, http.StatusOK, items)
}
}
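The dash-stripping above means a displayed asset ID is reduced to its numeric value before the lookup; a tiny illustration (not part of the diff, reusing the file's strings and strconv imports):

func exampleAssetIDNormalization() {
	// "000-001" → "000001" → 1, the value handed to repo.AssetID above.
	raw := strings.ReplaceAll("000-001", "-", "")
	id, _ := strconv.ParseInt(raw, 10, 64)
	_ = id
}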


@@ -3,92 +3,140 @@ package v1
import (
"errors"
"net/http"
"strconv"
"strings"
"time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
const (
cookieNameToken = "hb.auth.token"
cookieNameRemember = "hb.auth.remember"
cookieNameSession = "hb.auth.session"
)
type (
TokenResponse struct {
Token string `json:"token"`
ExpiresAt time.Time `json:"expiresAt"`
Token string `json:"token"`
ExpiresAt time.Time `json:"expiresAt"`
AttachmentToken string `json:"attachmentToken"`
}
LoginForm struct {
Username string `json:"username"`
Password string `json:"password"`
Username string `json:"username"`
Password string `json:"password"`
StayLoggedIn bool `json:"stayLoggedIn"`
}
)
type CookieContents struct {
Token string
ExpiresAt time.Time
Remember bool
}
func GetCookies(r *http.Request) (*CookieContents, error) {
cookie, err := r.Cookie(cookieNameToken)
if err != nil {
return nil, errors.New("authorization cookie is required")
}
rememberCookie, err := r.Cookie(cookieNameRemember)
if err != nil {
return nil, errors.New("remember cookie is required")
}
return &CookieContents{
Token: cookie.Value,
ExpiresAt: cookie.Expires,
Remember: rememberCookie.Value == "true",
}, nil
}
// AuthProvider is an interface that can be implemented by any authentication provider
// to extend authentication methods for the API.
type AuthProvider interface {
// Name returns the name of the authentication provider. This should be a unique name
// that is URL friendly.
//
// Example: "local", "ldap"
Name() string
// Authenticate is called when a user attempts to log in to the API. The implementation
// should return an error if the user cannot be authenticated. If an error is returned,
// the API controller will return a vague error message to the user.
//
// Authenticate should do the following:
//
// 1. Ensure that the user exists within the database (either create, or get)
// 2. On successful authentication, it must set the user cookies.
Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error)
}
// HandleAuthLogin godoc
// @Summary User Login
// @Tags Authentication
// @Accept x-www-form-urlencoded
// @Accept application/json
// @Param username formData string false "string" example(admin@admin.com)
// @Param password formData string false "string" example(admin)
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
func (ctrl *V1Controller) HandleAuthLogin() server.HandlerFunc {
//
// @Summary User Login
// @Tags Authentication
// @Accept x-www-form-urlencoded
// @Accept application/json
// @Param username formData string false "string" example(admin@admin.com)
// @Param password formData string false "string" example(admin)
// @Param payload body LoginForm true "Login Data"
// @Param provider query string false "auth provider"
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFunc {
if len(ps) == 0 {
panic("no auth providers provided")
}
providers := make(map[string]AuthProvider)
for _, p := range ps {
log.Info().Str("name", p.Name()).Msg("registering auth provider")
providers[p.Name()] = p
}
return func(w http.ResponseWriter, r *http.Request) error {
loginForm := &LoginForm{}
switch r.Header.Get("Content-Type") {
case server.ContentFormUrlEncoded:
err := r.ParseForm()
if err != nil {
return server.Respond(w, http.StatusBadRequest, server.Wrap(err))
}
loginForm.Username = r.PostFormValue("username")
loginForm.Password = r.PostFormValue("password")
case server.ContentJSON:
err := server.Decode(r, loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
}
default:
return server.Respond(w, http.StatusBadRequest, errors.New("invalid content type"))
// Extract provider query
provider := r.URL.Query().Get("provider")
if provider == "" {
provider = "local"
}
if loginForm.Username == "" || loginForm.Password == "" {
return validate.NewFieldErrors(
validate.FieldError{
Field: "username",
Error: "username or password is empty",
},
validate.FieldError{
Field: "password",
Error: "username or password is empty",
},
)
// Get the provider
p, ok := providers[provider]
if !ok {
return validate.NewRequestError(errors.New("invalid auth provider"), http.StatusBadRequest)
}
newToken, err := ctrl.svc.User.Login(r.Context(), loginForm.Username, loginForm.Password)
newToken, err := p.Authenticate(w, r)
if err != nil {
return validate.NewRequestError(errors.New("authentication failed"), http.StatusInternalServerError)
log.Err(err).Msg("failed to authenticate")
return server.JSON(w, http.StatusInternalServerError, err.Error())
}
return server.Respond(w, http.StatusOK, TokenResponse{
Token: "Bearer " + newToken.Raw,
ExpiresAt: newToken.ExpiresAt,
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true)
return server.JSON(w, http.StatusOK, TokenResponse{
Token: "Bearer " + newToken.Raw,
ExpiresAt: newToken.ExpiresAt,
AttachmentToken: newToken.AttachmentToken,
})
}
}
// HandleAuthLogout godoc
// @Summary User Logout
// @Tags Authentication
// @Success 204
// @Router /v1/users/logout [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthLogout() server.HandlerFunc {
//
// @Summary User Logout
// @Tags Authentication
// @Success 204
// @Router /v1/users/logout [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())
if token == "" {
@@ -100,19 +148,21 @@ func (ctrl *V1Controller) HandleAuthLogout() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
ctrl.unsetCookies(w, noPort(r.Host))
return server.JSON(w, http.StatusNoContent, nil)
}
}
// HandleAuthLogout godoc
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database.
// @Tags Authentication
// @Success 200
// @Router /v1/users/refresh [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthRefresh() server.HandlerFunc {
// HandleAuthRefresh godoc
//
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database.
// @Tags Authentication
// @Success 200
// @Router /v1/users/refresh [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
requestToken := services.UseTokenCtx(r.Context())
if requestToken == "" {
@@ -124,6 +174,78 @@ func (ctrl *V1Controller) HandleAuthRefresh() server.HandlerFunc {
return validate.NewUnauthorizedError()
}
return server.Respond(w, http.StatusOK, newToken)
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false)
return server.JSON(w, http.StatusOK, newToken)
}
}
func noPort(host string) string {
return strings.Split(host, ":")[0]
}
func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: strconv.FormatBool(remember),
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
// Set HTTP only cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameToken,
Value: token,
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
// Set Fake Session cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameSession,
Value: "true",
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameToken,
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: "false",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
// Set Fake Session cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameSession,
Value: "false",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
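For context, a provider satisfying the AuthProvider interface introduced above can be as small as the sketch below. The type name and the wrapped login function are assumptions for illustration, and the sketch assumes the file's existing imports plus "context"; the project's real providers are defined elsewhere and are not shown in this diff.

// sketchProvider is illustrative only.
type sketchProvider struct {
	login func(ctx context.Context, username, password string) (services.UserAuthTokenDetail, error)
}

func (p sketchProvider) Name() string { return "sketch" }

func (p sketchProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
	if err := r.ParseForm(); err != nil {
		return services.UserAuthTokenDetail{}, err
	}
	// Verify (or create) the user and hand back the token details;
	// HandleAuthLogin above then sets the cookies and writes the TokenResponse.
	return p.login(r.Context(), r.PostFormValue("username"), r.PostFormValue("password"))
}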


@@ -7,13 +7,13 @@ import (
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/rs/zerolog/log"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
)
type (
GroupInvitationCreate struct {
Uses int `json:"uses"`
Uses int `json:"uses" validate:"required,min=1,max=100"`
ExpiresAt time.Time `json:"expiresAt"`
}
@@ -25,113 +25,73 @@
)
// HandleGroupGet godoc
// @Summary Get the current user's group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [Get]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatistics() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
stats, err := ctrl.repo.Groups.GroupStatistics(ctx, ctx.GID)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, stats)
//
// @Summary Get Group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.Group
// @Router /v1/groups [Get]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.Group, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.GroupByID(auth, auth.GID)
}
}
// HandleGroupGet godoc
// @Summary Get the current user's group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.Group
// @Router /v1/groups [Get]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupGet() server.HandlerFunc {
return ctrl.handleGroupGeneral()
return adapters.Command(fn, http.StatusOK)
}
// HandleGroupUpdate godoc
// @Summary Updates some fields of the current users group
// @Tags Group
// @Produce json
// @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group
// @Router /v1/groups [Put]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupUpdate() server.HandlerFunc {
return ctrl.handleGroupGeneral()
}
//
// @Summary Update Group
// @Tags Group
// @Produce json
// @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group
// @Router /v1/groups [Put]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.GroupUpdate) (repo.Group, error) {
auth := services.NewContext(r.Context())
func (ctrl *V1Controller) handleGroupGeneral() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
switch r.Method {
case http.MethodGet:
group, err := ctrl.repo.Groups.GroupByID(ctx, ctx.GID)
if err != nil {
log.Err(err).Msg("failed to get group")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, group)
case http.MethodPut:
data := repo.GroupUpdate{}
if err := server.Decode(r, &data); err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
group, err := ctrl.svc.Group.UpdateGroup(ctx, data)
if err != nil {
log.Err(err).Msg("failed to update group")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, group)
ok := ctrl.svc.Currencies.IsSupported(body.Currency)
if !ok {
return repo.Group{}, validate.NewFieldErrors(
validate.NewFieldError("currency", "currency '"+body.Currency+"' is not supported"),
)
}
return nil
return ctrl.svc.Group.UpdateGroup(auth, body)
}
return adapters.Action(fn, http.StatusOK)
}
// HandleGroupInvitationsCreate godoc
// @Summary Get the current user
// @Tags Group
// @Produce json
// @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupInvitationsCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
data := GroupInvitationCreate{}
if err := server.Decode(r, &data); err != nil {
log.Err(err).Msg("failed to decode user registration data")
return validate.NewRequestError(err, http.StatusBadRequest)
//
// @Summary Create Group Invitation
// @Tags Group
// @Produce json
// @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupInvitationsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body GroupInvitationCreate) (GroupInvitation, error) {
if body.ExpiresAt.IsZero() {
body.ExpiresAt = time.Now().Add(time.Hour * 24)
}
if data.ExpiresAt.IsZero() {
data.ExpiresAt = time.Now().Add(time.Hour * 24)
}
auth := services.NewContext(r.Context())
ctx := services.NewContext(r.Context())
token, err := ctrl.svc.Group.NewInvitation(auth, body.Uses, body.ExpiresAt)
token, err := ctrl.svc.Group.NewInvitation(ctx, data.Uses, data.ExpiresAt)
if err != nil {
log.Err(err).Msg("failed to create new token")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusCreated, GroupInvitation{
return GroupInvitation{
Token: token,
ExpiresAt: data.ExpiresAt,
Uses: data.Uses,
})
ExpiresAt: body.ExpiresAt,
Uses: body.Uses,
}, err
}
return adapters.Action(fn, http.StatusCreated)
}
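Many handlers in these diffs now delegate to the generic helpers in internal/web/adapters (Command, CommandID, Action, ActionID, Query, QueryID), whose implementation is not shown here. As a rough sketch inferred only from the call sites, an Action-style adapter decodes the request body, invokes the function, and writes the result; the real adapter presumably also runs struct validation, and encoding/json is an added import for the sketch.

// actionSketch is illustrative only and is not the project's adapters.Action.
func actionSketch[B any, T any](fn func(*http.Request, B) (T, error), status int) errchain.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) error {
		var body B
		if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
			return validate.NewRequestError(err, http.StatusBadRequest)
		}
		result, err := fn(r, body)
		if err != nil {
			return err
		}
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(status)
		return json.NewEncoder(w).Encode(result)
	}
}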


@@ -1,201 +1,293 @@
package v1
import (
"database/sql"
"encoding/csv"
"errors"
"net/http"
"net/url"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
// HandleItemsGetAll godoc
// @Summary Get All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() server.HandlerFunc {
uuidList := func(params url.Values, key string) []uuid.UUID {
var ids []uuid.UUID
for _, id := range params[key] {
uid, err := uuid.Parse(id)
if err != nil {
continue
}
ids = append(ids, uid)
}
return ids
}
intOrNegativeOne := func(s string) int {
i, err := strconv.Atoi(s)
if err != nil {
return -1
}
return i
}
getBool := func(s string) bool {
b, err := strconv.ParseBool(s)
if err != nil {
return false
}
return b
}
//
// @Summary Query All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Param parentIds query []string false "parent Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
extractQuery := func(r *http.Request) repo.ItemQuery {
params := r.URL.Query()
return repo.ItemQuery{
Page: intOrNegativeOne(params.Get("page")),
PageSize: intOrNegativeOne(params.Get("perPage")),
Search: params.Get("q"),
LocationIDs: uuidList(params, "locations"),
LabelIDs: uuidList(params, "labels"),
IncludeArchived: getBool(params.Get("includeArchived")),
filterFieldItems := func(raw []string) []repo.FieldQuery {
var items []repo.FieldQuery
for _, v := range raw {
parts := strings.SplitN(v, "=", 2)
if len(parts) == 2 {
items = append(items, repo.FieldQuery{
Name: parts[0],
Value: parts[1],
})
}
}
return items
}
v := repo.ItemQuery{
Page: queryIntOrNegativeOne(params.Get("page")),
PageSize: queryIntOrNegativeOne(params.Get("pageSize")),
Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"),
LabelIDs: queryUUIDList(params, "labels"),
ParentItemIDs: queryUUIDList(params, "parentIds"),
IncludeArchived: queryBool(params.Get("includeArchived")),
Fields: filterFieldItems(params["fields"]),
OrderBy: params.Get("orderBy"),
}
if strings.HasPrefix(v.Search, "#") {
aidStr := strings.TrimPrefix(v.Search, "#")
aid, ok := repo.ParseAssetID(aidStr)
if ok {
v.Search = ""
v.AssetID = aid
}
}
return v
}
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GID, extractQuery(r))
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return server.JSON(w, http.StatusOK, repo.PaginationResult[repo.ItemSummary]{
Items: []repo.ItemSummary{},
})
}
log.Err(err).Msg("failed to get items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, items)
return server.JSON(w, http.StatusOK, items)
}
}
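// Illustrative example (not part of the diff) of what extractQuery above
// produces for a request such as:
//
//	GET /v1/items?q=%23000-001&fields=color=red&includeArchived=true
//
// Assuming the dash-formatted asset ID "000-001" parses to 1, the resulting
// query is roughly:
//
//	repo.ItemQuery{
//		Page:            -1,   // "page" absent
//		PageSize:        -1,   // "pageSize" absent
//		Search:          "",   // cleared because the search term began with "#"
//		AssetID:         1,    // parsed from "000-001"
//		IncludeArchived: true,
//		Fields:          []repo.FieldQuery{{Name: "color", Value: "red"}},
//	}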
// HandleItemFullPath godoc
//
// @Summary Get the full path of an item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} []repo.ItemPath
// @Router /v1/items/{id}/path [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) ([]repo.ItemPath, error) {
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return nil, err
}
paths, err := ctrl.repo.Locations.PathForLoc(auth, auth.GID, item.Location.ID)
if err != nil {
return nil, err
}
if item.Parent != nil {
paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.Parent.ID,
Name: item.Parent.Name,
})
}
paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.ID,
Name: item.Name,
})
return paths, nil
}
return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleItemsCreate godoc
// @Summary Create a new item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 200 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
createData := repo.ItemCreate{}
if err := server.Decode(r, &createData); err != nil {
log.Err(err).Msg("failed to decode request body")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
user := services.UseUserCtx(r.Context())
item, err := ctrl.repo.Items.Create(r.Context(), user.GroupID, createData)
if err != nil {
log.Err(err).Msg("failed to create item")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusCreated, item)
//
// @Summary Create Item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 201 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.ItemCreate) (repo.ItemOut, error) {
return ctrl.svc.Items.Create(services.NewContext(r.Context()), body)
}
return adapters.Action(fn, http.StatusCreated)
}
// HandleItemGet godocs
// @Summary Gets a item and fields
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() server.HandlerFunc {
return ctrl.handleItemsGeneral()
//
// @Summary Get Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
}
return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleItemDelete godocs
// @Summary deletes a item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() server.HandlerFunc {
return ctrl.handleItemsGeneral()
//
// @Summary Delete Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.Items.DeleteByGroup(auth, auth.GID, ID)
return nil, err
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}
// HandleItemUpdate godocs
// @Summary updates a item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() server.HandlerFunc {
return ctrl.handleItemsGeneral()
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemUpdate) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
body.ID = ID
return ctrl.repo.Items.UpdateByGroup(auth, auth.GID, body)
}
return adapters.ActionID("id", fn, http.StatusOK)
}
func (ctrl *V1Controller) handleItemsGeneral() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
ID, err := ctrl.routeID(r)
// HandleItemPatch godocs
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemPatch true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [Patch]
// @Security Bearer
func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemPatch) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
body.ID = ID
err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
if err != nil {
return err
return repo.ItemOut{}, err
}
switch r.Method {
case http.MethodGet:
items, err := ctrl.repo.Items.GetOneByGroup(r.Context(), ctx.GID, ID)
if err != nil {
log.Err(err).Msg("failed to get item")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, items)
case http.MethodDelete:
err = ctrl.repo.Items.DeleteByGroup(r.Context(), ctx.GID, ID)
if err != nil {
log.Err(err).Msg("failed to delete item")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
case http.MethodPut:
body := repo.ItemUpdate{}
if err := server.Decode(r, &body); err != nil {
log.Err(err).Msg("failed to decode request body")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
body.ID = ID
result, err := ctrl.repo.Items.UpdateByGroup(r.Context(), ctx.GID, body)
if err != nil {
log.Err(err).Msg("failed to update item")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, result)
}
return nil
return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
}
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandleGetAllCustomFieldNames godocs
//
// @Summary Get All Custom Field Names
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
fn := func(r *http.Request) ([]string, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Items.GetAllCustomFieldNames(auth, auth.GID)
}
return adapters.Command(fn, http.StatusOK)
}
// HandleGetAllCustomFieldValues godocs
//
// @Summary Get All Custom Field Values
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
type query struct {
Field string `schema:"field" validate:"required"`
}
fn := func(r *http.Request, q query) ([]string, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field)
}
return adapters.Query(fn, http.StatusOK)
}
// HandleItemsImport godocs
// @Summary imports items into the database
// @Tags Items
// @Produce json
// @Success 204
// @Param csv formData file true "Image to upload"
// @Router /v1/items/import [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
//
// @Summary Import Items
// @Tags Items
// @Produce json
// @Success 204
// @Param csv formData file true "Image to upload"
// @Router /v1/items/import [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsImport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
err := r.ParseMultipartForm(ctrl.maxUploadSize << 20)
if err != nil {
log.Err(err).Msg("failed to parse multipart form")
@@ -208,21 +300,40 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
reader := csv.NewReader(file)
data, err := reader.ReadAll()
if err != nil {
log.Err(err).Msg("failed to read csv")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
user := services.UseUserCtx(r.Context())
_, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, data)
_, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, file)
if err != nil {
log.Err(err).Msg("failed to import items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
return server.JSON(w, http.StatusNoContent, nil)
}
}
// HandleItemsExport godocs
//
// @Summary Export Items
// @Tags Items
// @Success 200 {string} string "text/csv"
// @Router /v1/items/export [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
if err != nil {
log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
writer := csv.NewWriter(w)
writer.Comma = '\t'
return writer.WriteAll(csvData)
}
}


@@ -2,14 +2,16 @@ package v1
import (
"errors"
"fmt"
"net/http"
"path/filepath"
"strings"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
@@ -19,25 +21,25 @@
}
)
// HandleItemsImport godocs
// @Summary imports items into the database
// @Tags Items Attachments
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string true "Type of file"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} server.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentCreate() server.HandlerFunc {
// HandleItemAttachmentCreate godocs
//
// @Summary Create Item Attachment
// @Tags Items Attachments
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string true "Type of file"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} validate.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
err := r.ParseMultipartForm(ctrl.maxUploadSize << 20)
if err != nil {
log.Err(err).Msg("failed to parse multipart form")
return validate.NewRequestError(errors.New("failed to parse multipart form"), http.StatusBadRequest)
}
errs := validate.NewFieldErrors()
@@ -61,12 +63,20 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() server.HandlerFunc {
}
if !errs.Nil() {
return server.Respond(w, http.StatusUnprocessableEntity, errs)
return server.JSON(w, http.StatusUnprocessableEntity, errs)
}
attachmentType := r.FormValue("type")
if attachmentType == "" {
attachmentType = attachment.TypeAttachment.String()
// Attempt to auto-detect the type of the file
ext := filepath.Ext(attachmentName)
switch strings.ToLower(ext) {
case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
attachmentType = attachment.TypePhoto.String()
default:
attachmentType = attachment.TypeAttachment.String()
}
}
id, err := ctrl.routeID(r)
@@ -83,78 +93,53 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() server.HandlerFunc {
attachment.Type(attachmentType),
file,
)
if err != nil {
log.Err(err).Msg("failed to add attachment")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusCreated, item)
return server.JSON(w, http.StatusCreated, item)
}
}
// HandleItemAttachmentGet godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param token query string true "Attachment token"
// @Success 200
// @Router /v1/items/{id}/attachments/download [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDownload() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := server.GetParam(r, "token", "")
doc, err := ctrl.svc.Items.AttachmentPath(r.Context(), token)
if err != nil {
log.Err(err).Msg("failed to get attachment")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", doc.Title))
w.Header().Set("Content-Type", "application/octet-stream")
http.ServeFile(w, r, doc.Path)
return nil
}
}
// HandleItemAttachmentToken godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentToken() server.HandlerFunc {
//
// @Summary Get Item Attachment
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentGet() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
// HandleItemAttachmentDelete godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDelete() server.HandlerFunc {
//
// @Summary Delete Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDelete() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
// HandleItemAttachmentUpdate godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentUpdate() server.HandlerFunc {
//
// @Summary Update Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentUpdate() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
@@ -171,33 +156,15 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r
ctx := services.NewContext(r.Context())
switch r.Method {
// Token Handler
case http.MethodGet:
token, err := ctrl.svc.Items.AttachmentToken(ctx, ID, attachmentID)
doc, err := ctrl.svc.Items.AttachmentPath(r.Context(), attachmentID)
if err != nil {
switch err {
case services.ErrNotFound:
log.Err(err).
Str("id", attachmentID.String()).
Msg("failed to find attachment with id")
return validate.NewRequestError(err, http.StatusNotFound)
case services.ErrFileNotFound:
log.Err(err).
Str("id", attachmentID.String()).
Msg("failed to find file path for attachment with id")
log.Warn().Msg("attachment with no file path removed from database")
return validate.NewRequestError(err, http.StatusNotFound)
default:
log.Err(err).Msg("failed to get attachment")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
log.Err(err).Msg("failed to get attachment path")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, ItemAttachmentToken{Token: token})
http.ServeFile(w, r, doc.Path)
return nil
// Delete Attachment Handler
case http.MethodDelete:
@@ -207,7 +174,7 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
return server.JSON(w, http.StatusNoContent, nil)
// Update Attachment Handler
case http.MethodPut:
@@ -225,7 +192,7 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, val)
return server.JSON(w, http.StatusOK, val)
}
return nil


@@ -3,141 +3,100 @@ package v1
import (
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/rs/zerolog/log"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
)
// HandleLabelsGetAll godoc
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} server.Results{items=[]repo.LabelOut}
// @Router /v1/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsGetAll() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
user := services.UseUserCtx(r.Context())
labels, err := ctrl.repo.Labels.GetAll(r.Context(), user.GroupID)
if err != nil {
log.Err(err).Msg("error getting labels")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, server.Results{Items: labels})
//
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} []repo.LabelOut
// @Router /v1/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.LabelSummary, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Labels.GetAll(auth, auth.GID)
}
return adapters.Command(fn, http.StatusOK)
}
// HandleLabelsCreate godoc
// @Summary Create a new label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
createData := repo.LabelCreate{}
if err := server.Decode(r, &createData); err != nil {
log.Err(err).Msg("error decoding label create data")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
user := services.UseUserCtx(r.Context())
label, err := ctrl.repo.Labels.Create(r.Context(), user.GroupID, createData)
if err != nil {
log.Err(err).Msg("error creating label")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusCreated, label)
//
// @Summary Create Label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, data repo.LabelCreate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Labels.Create(auth, auth.GID, data)
}
return adapters.Action(fn, http.StatusCreated)
}
// HandleLabelDelete godocs
// @Summary deletes a label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelDelete() server.HandlerFunc {
return ctrl.handleLabelsGeneral()
//
// @Summary Delete Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.Labels.DeleteByGroup(auth, auth.GID, ID)
return nil, err
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}
// HandleLabelGet godocs
// @Summary Gets a label and fields
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelGet() server.HandlerFunc {
return ctrl.handleLabelsGeneral()
//
// @Summary Get Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Labels.GetOneByGroup(auth, auth.GID, ID)
}
return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleLabelUpdate godocs
// @Summary updates a label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelUpdate() server.HandlerFunc {
return ctrl.handleLabelsGeneral()
}
func (ctrl *V1Controller) handleLabelsGeneral() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
ID, err := ctrl.routeID(r)
if err != nil {
return err
}
switch r.Method {
case http.MethodGet:
labels, err := ctrl.repo.Labels.GetOneByGroup(r.Context(), ctx.GID, ID)
if err != nil {
if ent.IsNotFound(err) {
log.Err(err).
Str("id", ID.String()).
Msg("label not found")
return validate.NewRequestError(err, http.StatusNotFound)
}
log.Err(err).Msg("error getting label")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, labels)
case http.MethodDelete:
err = ctrl.repo.Labels.DeleteByGroup(ctx, ctx.GID, ID)
if err != nil {
log.Err(err).Msg("error deleting label")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
case http.MethodPut:
body := repo.LabelUpdate{}
if err := server.Decode(r, &body); err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
body.ID = ID
result, err := ctrl.repo.Labels.UpdateByGroup(ctx, ctx.GID, body)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, result)
}
return nil
//
// @Summary Update Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, data repo.LabelUpdate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
data.ID = ID
return ctrl.repo.Labels.UpdateByGroup(auth, auth.GID, data)
}
return adapters.ActionID("id", fn, http.StatusOK)
}


@@ -3,146 +3,120 @@ package v1
import (
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/rs/zerolog/log"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
)
// HandleLocationGetAll godoc
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Success 200 {object} server.Results{items=[]repo.LocationOutCount}
// @Router /v1/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGetAll() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
user := services.UseUserCtx(r.Context())
locations, err := ctrl.repo.Locations.GetAll(r.Context(), user.GroupID)
if err != nil {
log.Err(err).Msg("failed to get locations")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, server.Results{Items: locations})
// HandleLocationTreeQuery godoc
//
// @Summary Get Locations Tree
// @Tags Locations
// @Produce json
// @Param withItems query bool false "include items in response tree"
// @Success 200 {object} []repo.TreeItem
// @Router /v1/locations/tree [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
fn := func(r *http.Request, query repo.TreeQuery) ([]repo.TreeItem, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.Tree(auth, auth.GID, query)
}
return adapters.Query(fn, http.StatusOK)
}
// HandleLocationGetAll godoc
//
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} []repo.LocationOutCount
// @Router /v1/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, q repo.LocationQuery) ([]repo.LocationOutCount, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.GetAll(auth, auth.GID, q)
}
return adapters.Query(fn, http.StatusOK)
}
// HandleLocationCreate godoc
// @Summary Create a new location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
createData := repo.LocationCreate{}
if err := server.Decode(r, &createData); err != nil {
log.Err(err).Msg("failed to decode location create data")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
user := services.UseUserCtx(r.Context())
location, err := ctrl.repo.Locations.Create(r.Context(), user.GroupID, createData)
if err != nil {
log.Err(err).Msg("failed to create location")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusCreated, location)
//
// @Summary Create Location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
fn := func(r *http.Request, createData repo.LocationCreate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.Create(auth, auth.GID, createData)
}
return adapters.Action(fn, http.StatusCreated)
}
// HandleLocationDelete godocs
// @Summary deletes a location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationDelete() server.HandlerFunc {
return ctrl.handleLocationGeneral()
}
// HandleLocationGet godocs
// @Summary Gets a location and fields
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGet() server.HandlerFunc {
return ctrl.handleLocationGeneral()
}
// HandleLocationUpdate godocs
// @Summary updates a location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationUpdate() server.HandlerFunc {
return ctrl.handleLocationGeneral()
}
func (ctrl *V1Controller) handleLocationGeneral() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
ID, err := ctrl.routeID(r)
if err != nil {
return err
}
switch r.Method {
case http.MethodGet:
location, err := ctrl.repo.Locations.GetOneByGroup(r.Context(), ctx.GID, ID)
if err != nil {
l := log.Err(err).
Str("ID", ID.String()).
Str("GID", ctx.GID.String())
if ent.IsNotFound(err) {
l.Msg("location not found")
return validate.NewRequestError(err, http.StatusNotFound)
}
l.Msg("failed to get location")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, location)
case http.MethodPut:
body := repo.LocationUpdate{}
if err := server.Decode(r, &body); err != nil {
log.Err(err).Msg("failed to decode location update data")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
body.ID = ID
result, err := ctrl.repo.Locations.UpdateOneByGroup(r.Context(), ctx.GID, ID, body)
if err != nil {
log.Err(err).Msg("failed to update location")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, result)
case http.MethodDelete:
err = ctrl.repo.Locations.DeleteByGroup(r.Context(), ctx.GID, ID)
if err != nil {
log.Err(err).Msg("failed to delete location")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
}
return nil
// HandleLocationDelete godoc
//
// @Summary Delete Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.Locations.DeleteByGroup(auth, auth.GID, ID)
return nil, err
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}
// HandleLocationGet godoc
//
// @Summary Get Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.GetOneByGroup(auth, auth.GID, ID)
}
return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleLocationUpdate godoc
//
// @Summary Update Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.LocationUpdate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
body.ID = ID
return ctrl.repo.Locations.UpdateByGroup(auth, auth.GID, ID, body)
}
return adapters.ActionID("id", fn, http.StatusOK)
}


@@ -0,0 +1,82 @@
package v1
import (
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
)
// HandleMaintenanceLogGet godoc
//
// @Summary Get Maintenance Log
// @Tags Maintenance
// @Produce json
// @Success 200 {object} repo.MaintenanceLog
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, q repo.MaintenanceLogQuery) (repo.MaintenanceLog, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetLog(auth, auth.GID, ID, q)
}
return adapters.QueryID("id", fn, http.StatusOK)
}
// HandleMaintenanceEntryCreate godoc
//
// @Summary Create Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 201 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryCreate() errchain.HandlerFunc {
fn := func(r *http.Request, itemID uuid.UUID, body repo.MaintenanceEntryCreate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Create(auth, itemID, body)
}
return adapters.ActionID("id", fn, http.StatusCreated)
}
// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/items/{id}/maintenance/{entry_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}
return adapters.CommandID("entry_id", fn, http.StatusNoContent)
}
// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance/{entry_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}
return adapters.ActionID("entry_id", fn, http.StatusOK)
}
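These maintenance handlers, like the location handlers above, delegate request decoding and response encoding to the generic helpers in internal/web/adapters (Query, QueryID, Command, CommandID, Action, ActionID). The adapters package itself is not part of this diff, so the following is only a rough sketch of what an ActionID-style helper might look like; the chi URL-parameter lookup, JSON body decoding, and exact signature are assumptions, not the repository's implementation.

package adapters

import (
	"encoding/json"
	"net/http"

	"github.com/go-chi/chi/v5"
	"github.com/google/uuid"
	"github.com/hay-kot/httpkit/errchain"
)

// ActionID is a hypothetical sketch: it parses the {param} URL segment as a
// UUID, decodes the request body into B, calls fn, and writes the typed
// result as JSON with the given status code.
func ActionID[B any, T any](param string, fn func(*http.Request, uuid.UUID, B) (T, error), status int) errchain.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) error {
		id, err := uuid.Parse(chi.URLParam(r, param))
		if err != nil {
			return err
		}

		var body B
		if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
			return err
		}

		result, err := fn(r, id, body)
		if err != nil {
			return err
		}

		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(status)
		return json.NewEncoder(w).Encode(result)
	}
}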


@ -0,0 +1,105 @@
package v1
import (
"net/http"
"github.com/containrrr/shoutrrr"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
)
// HandleGetUserNotifiers godoc
//
// @Summary Get Notifiers
// @Tags Notifiers
// @Produce json
// @Success 200 {object} []repo.NotifierOut
// @Router /v1/notifiers [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
fn := func(r *http.Request, _ struct{}) ([]repo.NotifierOut, error) {
user := services.UseUserCtx(r.Context())
return ctrl.repo.Notifiers.GetByUser(r.Context(), user.ID)
}
return adapters.Query(fn, http.StatusOK)
}
// HandleCreateNotifier godoc
//
// @Summary Create Notifier
// @Tags Notifiers
// @Produce json
// @Param payload body repo.NotifierCreate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, in repo.NotifierCreate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Notifiers.Create(auth, auth.GID, auth.UID, in)
}
return adapters.Action(fn, http.StatusCreated)
}
// HandleDeleteNotifier godoc
//
// @Summary Delete a Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Success 204
// @Router /v1/notifiers/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
return nil, ctrl.repo.Notifiers.Delete(auth, auth.UID, ID)
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}
// HandleUpdateNotifier godoc
//
// @Summary Update Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Param payload body repo.NotifierUpdate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Notifiers.Update(auth, auth.UID, ID, in)
}
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandlerNotifierTest godoc
//
// @Summary Test Notifier
// @Tags Notifiers
// @Produce json
// @Param id path string true "Notifier ID"
// @Param url query string true "URL"
// @Success 204
// @Router /v1/notifiers/test [POST]
// @Security Bearer
func (ctrl *V1Controller) HandlerNotifierTest() errchain.HandlerFunc {
type body struct {
URL string `json:"url" validate:"required"`
}
fn := func(r *http.Request, q body) (any, error) {
err := shoutrrr.Send(q.URL, "Test message from Homebox")
return nil, err
}
return adapters.Action(fn, http.StatusOK)
}


@ -0,0 +1,72 @@
package v1
import (
"bytes"
"image/png"
"io"
"net/http"
"net/url"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/yeqown/go-qrcode/v2"
"github.com/yeqown/go-qrcode/writer/standard"
_ "embed"
)
//go:embed assets/QRIcon.png
var qrcodeLogo []byte
// HandleGenerateQRCode godoc
//
// @Summary Create QR Code
// @Tags Items
// @Produce json
// @Param data query string false "data to be encoded into qrcode"
// @Success 200 {string} string "image/jpeg"
// @Router /v1/qrcode [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
type query struct {
// 4,296 characters is the maximum length of a QR code
Data string `schema:"data" validate:"required,max=4296"`
}
return func(w http.ResponseWriter, r *http.Request) error {
q, err := adapters.DecodeQuery[query](r)
if err != nil {
return err
}
image, err := png.Decode(bytes.NewReader(qrcodeLogo))
if err != nil {
panic(err)
}
decodedStr, err := url.QueryUnescape(q.Data)
if err != nil {
return err
}
qrc, err := qrcode.New(decodedStr)
if err != nil {
return err
}
toWriteCloser := struct {
io.Writer
io.Closer
}{
Writer: w,
Closer: io.NopCloser(nil),
}
qrwriter := standard.NewWithWriter(toWriteCloser, standard.WithLogoImage(image))
// Return the QR code as a jpeg image
w.Header().Set("Content-Type", "image/jpeg")
w.Header().Set("Content-Disposition", "attachment; filename=qrcode.jpg")
return qrc.Save(qrwriter)
}
}
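standard.NewWithWriter evidently expects an io.WriteCloser, which is why the handler wraps the http.ResponseWriter together with a no-op Closer before handing it to the QR writer. The same adaptation can be written as a small named type; this is only an equivalent restatement of the anonymous struct above, not code from the repository.

// nopWriteCloser adapts any io.Writer (here the http.ResponseWriter) into an
// io.WriteCloser whose Close is a no-op, mirroring the anonymous
// struct{ io.Writer; io.Closer } literal used above.
type nopWriteCloser struct {
	io.Writer
}

func (nopWriteCloser) Close() error { return nil }

// usage: qrwriter := standard.NewWithWriter(nopWriteCloser{Writer: w}, standard.WithLogoImage(image))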


@ -0,0 +1,32 @@
package v1
import (
"net/http"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/httpkit/errchain"
)
// HandleBillOfMaterialsExport godoc
//
// @Summary Export Bill of Materials
// @Tags Reporting
// @Produce json
// @Success 200 {string} string "text/csv"
// @Router /v1/reporting/bill-of-materials [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context())
csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
if err != nil {
return err
}
w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
_, err = w.Write(csv)
return err
}
}


@ -0,0 +1,104 @@
package v1
import (
"net/http"
"time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
)
// HandleGroupStatisticsLocations godoc
//
// @Summary Get Location Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.StatsLocationsByPurchasePrice(auth, auth.GID)
}
return adapters.Command(fn, http.StatusOK)
}
// HandleGroupStatisticsLabels godoc
//
// @Summary Get Label Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.StatsLabelsByPurchasePrice(auth, auth.GID)
}
return adapters.Command(fn, http.StatusOK)
}
// HandleGroupStatistics godoc
//
// @Summary Get Group Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.GroupStatistics, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.StatsGroup(auth, auth.GID)
}
return adapters.Command(fn, http.StatusOK)
}
// HandleGroupStatisticsPriceOverTime godoc
//
// @Summary Get Purchase Price Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date"
// @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() errchain.HandlerFunc {
parseDate := func(datestr string, defaultDate time.Time) (time.Time, error) {
if datestr == "" {
return defaultDate, nil
}
return time.Parse("2006-01-02", datestr)
}
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
startDate, err := parseDate(r.URL.Query().Get("start"), time.Now().AddDate(0, -1, 0))
if err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
endDate, err := parseDate(r.URL.Query().Get("end"), time.Now())
if err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
stats, err := ctrl.repo.Groups.StatsPurchasePrice(ctx, ctx.GID, startDate, endDate)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.JSON(w, http.StatusOK, stats)
}
}
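The start and end query parameters use Go's reference layout "2006-01-02", i.e. plain year-month-day dates, and default to a one-month window ending now when omitted. A small standard-library illustration of that layout, independent of this handler:

package main

import (
	"fmt"
	"time"
)

func main() {
	// "2006-01-02" is Go's reference date written as year-month-day,
	// so it accepts values such as 2024-03-01.
	t, err := time.Parse("2006-01-02", "2024-03-01")
	if err != nil {
		panic(err)
	}
	fmt.Println(t.Format(time.RFC3339)) // 2024-03-01T00:00:00Z

	// An empty parameter falls back to a default, mirroring parseDate above.
	defaultStart := time.Now().AddDate(0, -1, 0) // one month ago
	fmt.Println(defaultStart.Format("2006-01-02"))
}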


@ -1,24 +1,27 @@
package v1
import (
"fmt"
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
// HandleUserSelf godoc
// @Summary Get the current user
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Router /v1/users/register [Post]
func (ctrl *V1Controller) HandleUserRegistration() server.HandlerFunc {
// HandleUserRegistration godoc
//
// @Summary Register New User
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Router /v1/users/register [Post]
func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
regData := services.UserRegistration{}
@ -28,7 +31,7 @@ func (ctrl *V1Controller) HandleUserRegistration() server.HandlerFunc {
}
if !ctrl.allowRegistration && regData.GroupToken == "" {
return validate.NewRequestError(nil, http.StatusForbidden)
return validate.NewRequestError(fmt.Errorf("user registration disabled"), http.StatusForbidden)
}
_, err := ctrl.svc.User.RegisterUser(r.Context(), regData)
@ -37,18 +40,19 @@ func (ctrl *V1Controller) HandleUserRegistration() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
return server.JSON(w, http.StatusNoContent, nil)
}
}
// HandleUserSelf godoc
// @Summary Get the current user
// @Tags User
// @Produce json
// @Success 200 {object} server.Result{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelf() server.HandlerFunc {
//
// @Summary Get User Self
// @Tags User
// @Produce json
// @Success 200 {object} Wrapped{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())
usr, err := ctrl.svc.User.GetSelf(r.Context(), token)
@ -57,19 +61,20 @@ func (ctrl *V1Controller) HandleUserSelf() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, server.Wrap(usr))
return server.JSON(w, http.StatusOK, Wrap(usr))
}
}
// HandleUserSelfUpdate godoc
// @Summary Update the current user
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} server.Result{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfUpdate() server.HandlerFunc {
//
// @Summary Update Account
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} Wrapped{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
updateData := repo.UserUpdate{}
if err := server.Decode(r, &updateData); err != nil {
@ -79,23 +84,23 @@ func (ctrl *V1Controller) HandleUserSelfUpdate() server.HandlerFunc {
actor := services.UseUserCtx(r.Context())
newData, err := ctrl.svc.User.UpdateSelf(r.Context(), actor.ID, updateData)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, server.Wrap(newData))
return server.JSON(w, http.StatusOK, Wrap(newData))
}
}
// HandleUserSelfDelete godoc
// @Summary Deletes the user account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfDelete() server.HandlerFunc {
//
// @Summary Delete Account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfDelete() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
return validate.NewRequestError(nil, http.StatusForbidden)
@ -106,7 +111,7 @@ func (ctrl *V1Controller) HandleUserSelfDelete() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
return server.JSON(w, http.StatusNoContent, nil)
}
}
@ -118,13 +123,14 @@ type (
)
// HandleUserSelfChangePassword godoc
// @Summary Updates the users password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfChangePassword() server.HandlerFunc {
//
// @Summary Change Password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
return validate.NewRequestError(nil, http.StatusForbidden)
@ -143,6 +149,6 @@ func (ctrl *V1Controller) HandleUserSelfChangePassword() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusNoContent, nil)
return server.JSON(w, http.StatusNoContent, nil)
}
}


@ -2,7 +2,6 @@ package main
import (
"os"
"strings"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/rs/zerolog"
@ -18,24 +17,8 @@ func (a *app) setupLogger() {
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}).With().Caller().Logger()
}
log.Level(getLevel(a.conf.Log.Level))
}
func getLevel(l string) zerolog.Level {
switch strings.ToLower(l) {
case "debug":
return zerolog.DebugLevel
case "info":
return zerolog.InfoLevel
case "warn":
return zerolog.WarnLevel
case "error":
return zerolog.ErrorLevel
case "fatal":
return zerolog.FatalLevel
case "panic":
return zerolog.PanicLevel
default:
return zerolog.InfoLevel
level, err := zerolog.ParseLevel(a.conf.Log.Level)
if err == nil {
zerolog.SetGlobalLevel(level)
}
}


@ -1,7 +1,9 @@
package main
import (
"bytes"
"context"
"fmt"
"net/http"
"os"
"path/filepath"
@ -9,16 +11,24 @@ import (
atlas "ariga.io/atlas/sql/migrate"
"entgo.io/ent/dialect/sql/schema"
"github.com/hay-kot/homebox/backend/app/api/static/docs"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/hay-kot/homebox/backend/internal/core/currencies"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/migrations"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/internal/web/mid"
"github.com/hay-kot/homebox/backend/pkgs/server"
_ "github.com/mattn/go-sqlite3"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/graceful"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/rs/zerolog/pkgerrors"
_ "github.com/hay-kot/homebox/backend/pkgs/cgofreesqlite"
)
var (
@ -27,24 +37,32 @@ var (
buildTime = "now"
)
// @title Go API Templates
func build() string {
short := commit
if len(short) > 7 {
short = short[:7]
}
return fmt.Sprintf("%s, commit %s, built at %s", version, short, buildTime)
}
// @title Homebox API
// @version 1.0
// @description This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.
// @description Track, Manage, and Organize your Things.
// @contact.name Don't
// @license.name MIT
// @BasePath /api
// @securityDefinitions.apikey Bearer
// @in header
// @name Authorization
// @description "Type 'Bearer TOKEN' to correctly set the API Key"
func main() {
cfg, err := config.New()
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack
cfg, err := config.New(build(), "Homebox inventory management system")
if err != nil {
panic(err)
}
docs.SwaggerInfo.Host = cfg.Swagger.Host
if err := run(cfg); err != nil {
panic(err)
}
@ -57,17 +75,17 @@ func run(cfg *config.Config) error {
// =========================================================================
// Initialize Database & Repos
err := os.MkdirAll(cfg.Storage.Data, 0755)
err := os.MkdirAll(cfg.Storage.Data, 0o755)
if err != nil {
log.Fatal().Err(err).Msg("failed to create data directory")
}
c, err := ent.Open("sqlite3", cfg.Storage.SqliteUrl)
c, err := ent.Open("sqlite3", cfg.Storage.SqliteURL)
if err != nil {
log.Fatal().
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteUrl).
Str("url", cfg.Storage.SqliteURL).
Msg("failed opening connection to sqlite")
}
defer func(c *ent.Client) {
@ -100,7 +118,7 @@ func run(cfg *config.Config) error {
log.Fatal().
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteUrl).
Str("url", cfg.Storage.SqliteURL).
Msg("failed creating schema resources")
}
@ -110,67 +128,145 @@ func run(cfg *config.Config) error {
return err
}
app.db = c
app.repos = repo.New(c, cfg.Storage.Data)
app.services = services.New(app.repos)
// =========================================================================
// Start Server\
logger := log.With().Caller().Logger()
mwLogger := mid.Logger(logger)
if app.conf.Mode == config.ModeDevelopment {
mwLogger = mid.SugarLogger(logger)
collectFuncs := []currencies.CollectorFunc{
currencies.CollectDefaults(),
}
app.server = server.NewServer(
server.WithHost(app.conf.Web.Host),
server.WithPort(app.conf.Web.Port),
server.WithMiddleware(
mwLogger,
mid.Errors(logger),
mid.Panic(app.conf.Mode == config.ModeDevelopment),
),
if cfg.Options.CurrencyConfig != "" {
log.Info().
Str("path", cfg.Options.CurrencyConfig).
Msg("loading currency config file")
content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Fatal().
Err(err).
Str("path", cfg.Options.CurrencyConfig).
Msg("failed to read currency config file")
}
collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
}
currencies, err := currencies.CollectionCurrencies(collectFuncs...)
if err != nil {
log.Fatal().
Err(err).
Msg("failed to collect currencies")
}
app.bus = eventbus.New()
app.db = c
app.repos = repo.New(c, app.bus, cfg.Storage.Data)
app.services = services.New(
app.repos,
services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID),
services.WithCurrencies(currencies),
)
app.mountRoutes(app.repos)
// =========================================================================
// Start Server
log.Info().Msgf("Starting HTTP Server on %s:%s", app.server.Host, app.server.Port)
logger := log.With().Caller().Logger()
router := chi.NewMux()
router.Use(
middleware.RequestID,
middleware.RealIP,
mid.Logger(logger),
middleware.Recoverer,
middleware.StripSlashes,
)
chain := errchain.New(mid.Errors(logger))
app.mountRoutes(router, chain, app.repos)
runner := graceful.NewRunner()
runner.AddFunc("server", func(ctx context.Context) error {
httpserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Web.Port),
Handler: router,
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}
go func() {
<-ctx.Done()
_ = httpserver.Shutdown(context.Background())
}()
log.Info().Msgf("Server is running on %s:%s", cfg.Web.Host, cfg.Web.Port)
return httpserver.ListenAndServe()
})
// =========================================================================
// Start Reoccurring Tasks
go app.startBgTask(time.Duration(24)*time.Hour, func() {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(context.Background())
runner.AddFunc("eventbus", app.bus.Run)
runner.AddFunc("seed_database", func(ctx context.Context) error {
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
app.SetupDemo()
}
return nil
})
runner.AddPlugin(NewTask("purge-tokens", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired tokens")
}
})
go app.startBgTask(time.Duration(24)*time.Hour, func() {
_, err := app.repos.Groups.InvitationPurge(context.Background())
}))
runner.AddPlugin(NewTask("purge-invitations", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.Groups.InvitationPurge(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired invitations")
}
})
}))
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
app.SetupDemo()
}
runner.AddPlugin(NewTask("send-notifications", time.Duration(1)*time.Hour, func(ctx context.Context) {
now := time.Now()
if now.Hour() == 8 {
fmt.Println("run notifiers")
err := app.services.BackgroundService.SendNotifiersToday(context.Background())
if err != nil {
log.Error().
Err(err).
Msg("failed to send notifiers")
}
}
}))
if cfg.Debug.Enabled {
debugrouter := app.debugRouter()
go func() {
if err := http.ListenAndServe(":"+cfg.Debug.Port, debugrouter); err != nil {
log.Fatal().Err(err).Msg("failed to start debug server")
runner.AddFunc("debug", func(ctx context.Context) error {
debugserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port),
Handler: app.debugRouter(),
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}
}()
go func() {
<-ctx.Done()
_ = debugserver.Shutdown(context.Background())
}()
log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port)
return debugserver.ListenAndServe()
})
}
return app.server.Start()
return runner.Start(context.Background())
}
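The rewritten startup replaces the old custom server wrapper with a run-group style: each long-lived component (HTTP server, event bus, debug server, background tasks) is registered with the graceful runner and shuts down when the shared context is cancelled. Distilled to only the calls that appear above, with the listen address as a placeholder, the pattern is:

package main

import (
	"context"
	"net/http"

	"github.com/hay-kot/httpkit/graceful"
)

// serve registers a single blocking function with the runner; Start blocks
// until every registered function has returned after the context is cancelled.
func serve(addr string, handler http.Handler) error {
	runner := graceful.NewRunner()

	runner.AddFunc("server", func(ctx context.Context) error {
		srv := http.Server{Addr: addr, Handler: handler}

		go func() {
			<-ctx.Done() // the runner cancels this context on shutdown
			_ = srv.Shutdown(context.Background())
		}()

		return srv.ListenAndServe()
	})

	return runner.Start(context.Background())
}

func main() {
	if err := serve(":7745", http.NewServeMux()); err != nil && err != http.ErrServerClosed {
		panic(err)
	}
}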


@ -1,31 +1,151 @@
package main
import (
"context"
"errors"
"net/http"
"net/url"
"strings"
v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
"github.com/hay-kot/httpkit/errchain"
)
type tokenHasKey struct {
key string
}
var hashedToken = tokenHasKey{key: "hashedToken"}
type RoleMode int
const (
RoleModeOr RoleMode = 0
RoleModeAnd RoleMode = 1
)
// mwRoles is a middleware that validates that the required roles are met. With
// RoleModeOr any one of the required roles is sufficient; with RoleModeAnd every
// required role must be present. If the requirement is not met, a 403 Forbidden
// is returned.
//
// WARNING: This middleware _MUST_ be called after mwAuthToken or else it will panic
func (a *app) mwRoles(rm RoleMode, required ...string) errchain.Middleware {
return func(next errchain.Handler) errchain.Handler {
return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
ctx := r.Context()
maybeToken := ctx.Value(hashedToken)
if maybeToken == nil {
panic("mwRoles: token not found in context, you must call mwAuthToken before mwRoles")
}
token := maybeToken.(string)
roles, err := a.repos.AuthTokens.GetRoles(r.Context(), token)
if err != nil {
return err
}
outer:
switch rm {
case RoleModeOr:
for _, role := range required {
if roles.Contains(role) {
break outer
}
}
return validate.NewRequestError(errors.New("Forbidden"), http.StatusForbidden)
case RoleModeAnd:
for _, req := range required {
if !roles.Contains(req) {
return validate.NewRequestError(errors.New("Unauthorized"), http.StatusForbidden)
}
}
}
return next.ServeHTTP(w, r)
})
}
}
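Because mwRoles reads the hashed token that mwAuthToken stores in the request context, the two middlewares must always be chained in that order. The route registration later in this diff does exactly that; excerpted here for reference:

// Excerpt of the wiring in mountRoutes further below: mwAuthToken runs first,
// otherwise mwRoles panics because the hashed token is missing from the context.
userMW := []errchain.Middleware{
	a.mwAuthToken,
	a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
}
r.Get(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))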
type KeyFunc func(r *http.Request) (string, error)
func getBearer(r *http.Request) (string, error) {
auth := r.Header.Get("Authorization")
if auth == "" {
return "", errors.New("authorization header is required")
}
return auth, nil
}
func getQuery(r *http.Request) (string, error) {
token := r.URL.Query().Get("access_token")
if token == "" {
return "", errors.New("access_token query is required")
}
token, err := url.QueryUnescape(token)
if err != nil {
return "", errors.New("access_token query is required")
}
return token, nil
}
// mwAuthToken is a middleware that will check the database for a stateful token
// and attach it to the request context with the user, or return a 401 if it doesn't exist.
func (a *app) mwAuthToken(next server.Handler) server.Handler {
return server.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
requestToken := r.Header.Get("Authorization")
// and attach its user to the request context, or return an appropriate error.
// Authorization support is by token via Headers or Query Parameter
//
// Example:
// - header = "Bearer 1234567890"
// - query = "?access_token=1234567890"
func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
var requestToken string
// We ignore the error to allow the next strategy to be attempted
{
cookies, _ := v1.GetCookies(r)
if cookies != nil {
requestToken = cookies.Token
}
}
if requestToken == "" {
return validate.NewRequestError(errors.New("Authorization header is required"), http.StatusUnauthorized)
keyFuncs := [...]KeyFunc{
getBearer,
getQuery,
}
for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break
}
}
}
if requestToken == "" {
return validate.NewRequestError(errors.New("authorization header or query is required"), http.StatusUnauthorized)
}
requestToken = strings.TrimPrefix(requestToken, "Bearer ")
usr, err := a.services.User.GetSelf(r.Context(), requestToken)
r = r.WithContext(context.WithValue(r.Context(), hashedToken, requestToken))
usr, err := a.services.User.GetSelf(r.Context(), requestToken)
// Check the database for the token
if err != nil {
return validate.NewRequestError(errors.New("Authorization header is required"), http.StatusUnauthorized)
if ent.IsNotFound(err) {
return validate.NewRequestError(errors.New("valid authorization token is required"), http.StatusUnauthorized)
}
return err
}
r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken))
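The middleware accepts the token either from the Authorization header (via getBearer) or from an access_token query parameter (via getQuery), with the cookie checked first. A minimal client-side illustration follows; the host and port are placeholders, and the token value is the dummy one from the comment above.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Header-based authentication, as accepted by getBearer.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:7745/api/v1/users/self", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer 1234567890")

	// Query-based authentication, as accepted by getQuery; useful for direct
	// links that cannot set headers.
	queryURL := "http://localhost:7745/api/v1/users/self?access_token=1234567890"

	fmt.Println(req.Header.Get("Authorization"))
	fmt.Println(queryURL)
}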


@ -0,0 +1,2 @@
// Package providers provides an authentication abstraction for the backend.
package providers


@ -0,0 +1,55 @@
package providers
import (
"errors"
"net/http"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
)
type LoginForm struct {
Username string `json:"username"`
Password string `json:"password"`
StayLoggedIn bool `json:"stayLoggedIn"`
}
func getLoginForm(r *http.Request) (LoginForm, error) {
loginForm := LoginForm{}
switch r.Header.Get("Content-Type") {
case "application/x-www-form-urlencoded":
err := r.ParseForm()
if err != nil {
return loginForm, errors.New("failed to parse form")
}
loginForm.Username = r.PostFormValue("username")
loginForm.Password = r.PostFormValue("password")
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
case "application/json":
err := server.Decode(r, &loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
return loginForm, errors.New("failed to decode login form")
}
default:
return loginForm, errors.New("invalid content type")
}
if loginForm.Username == "" || loginForm.Password == "" {
return loginForm, validate.NewFieldErrors(
validate.FieldError{
Field: "username",
Error: "username or password is empty",
},
validate.FieldError{
Field: "password",
Error: "username or password is empty",
},
)
}
return loginForm, nil
}
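getLoginForm accepts either a urlencoded form or a JSON body, selected by Content-Type. A minimal JSON login request matching the LoginForm field tags is sketched below; the endpoint path comes from the routes file later in this diff, while the host, port, and credentials are placeholders.

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Field names match the json tags on LoginForm.
	payload := []byte(`{"username":"user@example.com","password":"secret","stayLoggedIn":true}`)

	resp, err := http.Post(
		"http://localhost:7745/api/v1/users/login",
		"application/json",
		bytes.NewReader(payload),
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println(resp.Status)
}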


@ -0,0 +1,30 @@
package providers
import (
"net/http"
"github.com/hay-kot/homebox/backend/internal/core/services"
)
type LocalProvider struct {
service *services.UserService
}
func NewLocalProvider(service *services.UserService) *LocalProvider {
return &LocalProvider{
service: service,
}
}
func (p *LocalProvider) Name() string {
return "local"
}
func (p *LocalProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
loginForm, err := getLoginForm(r)
if err != nil {
return services.UserAuthTokenDetail{}, err
}
return p.service.Login(r.Context(), loginForm.Username, loginForm.Password, loginForm.StayLoggedIn)
}
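LocalProvider is handed to HandleAuthLogin in the routes file below as a v1.AuthProvider. That interface is not included in this diff, but judging from the two methods implemented here it presumably looks roughly like the following; treat it as inferred, not as the actual definition.

package v1 // presumed location of the interface; not shown in this diff

import (
	"net/http"

	"github.com/hay-kot/homebox/backend/internal/core/services"
)

// AuthProvider, as inferred from LocalProvider: a named authentication
// strategy that turns an incoming request into auth token details.
type AuthProvider interface {
	// Name identifies the provider, e.g. "local".
	Name() string
	// Authenticate validates the request's credentials and returns token details.
	Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error)
}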


@ -3,19 +3,21 @@ package main
import (
"embed"
"errors"
"fmt"
"io"
"mime"
"net/http"
"path"
"path/filepath"
"github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
"github.com/hay-kot/homebox/backend/app/api/providers"
_ "github.com/hay-kot/homebox/backend/app/api/static/docs"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/server"
httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware
"github.com/hay-kot/httpkit/errchain"
httpSwagger "github.com/swaggo/http-swagger/v2" // http-swagger middleware
)
const prefix = "/api"
@ -35,78 +37,133 @@ func (a *app) debugRouter() *http.ServeMux {
}
// mountRoutes registers all the routes for the API
func (a *app) mountRoutes(repos *repo.AllRepos) {
func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllRepos) {
registerMimes()
a.server.Get("/swagger/*", server.ToHandler(httpSwagger.Handler(
httpSwagger.URL(fmt.Sprintf("%s://%s/swagger/doc.json", a.conf.Swagger.Scheme, a.conf.Swagger.Host)),
)))
r.Get("/swagger/*", httpSwagger.Handler(
httpSwagger.URL("/swagger/doc.json"),
))
// =========================================================================
// API Version 1
v1Base := v1.BaseUrlFunc(prefix)
v1Base := v1.BaseURLFunc(prefix)
v1Ctrl := v1.NewControllerV1(
a.services,
a.repos,
a.bus,
v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
v1.WithRegistration(a.conf.AllowRegistration),
v1.WithRegistration(a.conf.Options.AllowRegistration),
v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
)
a.server.Get(v1Base("/status"), v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
r.Get(v1Base("/status"), chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
Version: version,
Commit: commit,
BuildTime: buildTime,
}))
})))
a.server.Post(v1Base("/users/register"), v1Ctrl.HandleUserRegistration())
a.server.Post(v1Base("/users/login"), v1Ctrl.HandleAuthLogin())
r.Get(v1Base("/currencies"), chain.ToHandlerFunc(v1Ctrl.HandleCurrency()))
// Attachment download URL needs a `token` query param to be passed in the request,
// and it also needs to be outside of the `auth` middleware.
a.server.Get(v1Base("/items/{id}/attachments/download"), v1Ctrl.HandleItemAttachmentDownload())
providers := []v1.AuthProvider{
providers.NewLocalProvider(a.services.User),
}
a.server.Get(v1Base("/users/self"), v1Ctrl.HandleUserSelf(), a.mwAuthToken)
a.server.Put(v1Base("/users/self"), v1Ctrl.HandleUserSelfUpdate(), a.mwAuthToken)
a.server.Delete(v1Base("/users/self"), v1Ctrl.HandleUserSelfDelete(), a.mwAuthToken)
a.server.Post(v1Base("/users/logout"), v1Ctrl.HandleAuthLogout(), a.mwAuthToken)
a.server.Get(v1Base("/users/refresh"), v1Ctrl.HandleAuthRefresh(), a.mwAuthToken)
a.server.Put(v1Base("/users/self/change-password"), v1Ctrl.HandleUserSelfChangePassword(), a.mwAuthToken)
r.Post(v1Base("/users/register"), chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
r.Post(v1Base("/users/login"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...)))
a.server.Post(v1Base("/groups/invitations"), v1Ctrl.HandleGroupInvitationsCreate(), a.mwAuthToken)
a.server.Get(v1Base("/groups/statistics"), v1Ctrl.HandleGroupStatistics(), a.mwAuthToken)
userMW := []errchain.Middleware{
a.mwAuthToken,
a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
}
r.Get(v1Base("/ws/events"), chain.ToHandlerFunc(v1Ctrl.HandleCacheWS(), userMW...))
r.Get(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))
r.Put(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...))
r.Delete(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
r.Post(v1Base("/users/logout"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
r.Get(v1Base("/users/refresh"), chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
r.Put(v1Base("/users/self/change-password"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
r.Post(v1Base("/groups/invitations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...))
r.Get(v1Base("/groups/statistics"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...))
r.Get(v1Base("/groups/statistics/purchase-price"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...))
r.Get(v1Base("/groups/statistics/locations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
r.Get(v1Base("/groups/statistics/labels"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
// TODO: I don't like /groups being the URL for users
a.server.Get(v1Base("/groups"), v1Ctrl.HandleGroupGet(), a.mwAuthToken)
a.server.Put(v1Base("/groups"), v1Ctrl.HandleGroupUpdate(), a.mwAuthToken)
r.Get(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...))
r.Put(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...))
a.server.Get(v1Base("/locations"), v1Ctrl.HandleLocationGetAll(), a.mwAuthToken)
a.server.Post(v1Base("/locations"), v1Ctrl.HandleLocationCreate(), a.mwAuthToken)
a.server.Get(v1Base("/locations/{id}"), v1Ctrl.HandleLocationGet(), a.mwAuthToken)
a.server.Put(v1Base("/locations/{id}"), v1Ctrl.HandleLocationUpdate(), a.mwAuthToken)
a.server.Delete(v1Base("/locations/{id}"), v1Ctrl.HandleLocationDelete(), a.mwAuthToken)
r.Post(v1Base("/actions/ensure-asset-ids"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
r.Post(v1Base("/actions/zero-item-time-fields"), chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
r.Post(v1Base("/actions/ensure-import-refs"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
r.Post(v1Base("/actions/set-primary-photos"), chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
a.server.Get(v1Base("/labels"), v1Ctrl.HandleLabelsGetAll(), a.mwAuthToken)
a.server.Post(v1Base("/labels"), v1Ctrl.HandleLabelsCreate(), a.mwAuthToken)
a.server.Get(v1Base("/labels/{id}"), v1Ctrl.HandleLabelGet(), a.mwAuthToken)
a.server.Put(v1Base("/labels/{id}"), v1Ctrl.HandleLabelUpdate(), a.mwAuthToken)
a.server.Delete(v1Base("/labels/{id}"), v1Ctrl.HandleLabelDelete(), a.mwAuthToken)
r.Get(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
r.Post(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
r.Get(v1Base("/locations/tree"), chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...))
r.Get(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...))
r.Put(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...))
r.Delete(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...))
a.server.Get(v1Base("/items"), v1Ctrl.HandleItemsGetAll(), a.mwAuthToken)
a.server.Post(v1Base("/items/import"), v1Ctrl.HandleItemsImport(), a.mwAuthToken)
a.server.Post(v1Base("/items"), v1Ctrl.HandleItemsCreate(), a.mwAuthToken)
a.server.Get(v1Base("/items/{id}"), v1Ctrl.HandleItemGet(), a.mwAuthToken)
a.server.Put(v1Base("/items/{id}"), v1Ctrl.HandleItemUpdate(), a.mwAuthToken)
a.server.Delete(v1Base("/items/{id}"), v1Ctrl.HandleItemDelete(), a.mwAuthToken)
r.Get(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...))
r.Post(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...))
r.Get(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...))
r.Put(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...))
r.Delete(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
a.server.Post(v1Base("/items/{id}/attachments"), v1Ctrl.HandleItemAttachmentCreate(), a.mwAuthToken)
a.server.Get(v1Base("/items/{id}/attachments/{attachment_id}"), v1Ctrl.HandleItemAttachmentToken(), a.mwAuthToken)
a.server.Put(v1Base("/items/{id}/attachments/{attachment_id}"), v1Ctrl.HandleItemAttachmentUpdate(), a.mwAuthToken)
a.server.Delete(v1Base("/items/{id}/attachments/{attachment_id}"), v1Ctrl.HandleItemAttachmentDelete(), a.mwAuthToken)
r.Get(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...))
r.Post(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...))
r.Post(v1Base("/items/import"), chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...))
r.Get(v1Base("/items/export"), chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
r.Get(v1Base("/items/fields"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
r.Get(v1Base("/items/fields/values"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
a.server.NotFound(notFoundHandler())
r.Get(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
r.Get(v1Base("/items/{id}/path"), chain.ToHandlerFunc(v1Ctrl.HandleItemFullPath(), userMW...))
r.Put(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
r.Patch(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
r.Delete(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
r.Post(v1Base("/items/{id}/attachments"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
r.Put(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
r.Delete(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
r.Get(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...))
r.Post(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...))
r.Put(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
r.Delete(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...))
r.Get(v1Base("/assets/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
// Notifiers
r.Get(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...))
r.Post(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
r.Put(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
r.Delete(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
r.Post(v1Base("/notifiers/test"), chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
// Asset-Like endpoints
assetMW := []errchain.Middleware{
a.mwAuthToken,
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
}
r.Get(
v1Base("/qrcode"),
chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...),
)
r.Get(
v1Base("/items/{id}/attachments/{attachment_id}"),
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
)
// Reporting Services
r.Get(v1Base("/reporting/bill-of-materials"), chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
r.NotFound(chain.ToHandlerFunc(notFoundHandler()))
}
func registerMimes() {
@ -123,13 +180,13 @@ func registerMimes() {
// notFoundHandler performs the main logic around handling the internal SPA embed and ensuring that
// the client side routing is handled correctly.
func notFoundHandler() server.HandlerFunc {
func notFoundHandler() errchain.HandlerFunc {
tryRead := func(fs embed.FS, prefix, requestedPath string, w http.ResponseWriter) error {
f, err := fs.Open(path.Join(prefix, requestedPath))
if err != nil {
return err
}
defer f.Close()
defer func() { _ = f.Close() }()
stat, _ := f.Stat()
if stat.IsDir() {

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -2,6 +2,7 @@ package main
import (
"context"
"fmt"
"log"
"os"
@ -39,4 +40,6 @@ func main() {
if err != nil {
log.Fatalf("failed generating migration file: %v", err)
}
fmt.Println("Migration file generated successfully.")
}


@ -0,0 +1,81 @@
package main
import (
"fmt"
"os"
"regexp"
)
type ReReplace struct {
Regex *regexp.Regexp
Text string
}
func NewReReplace(regex string, replace string) ReReplace {
return ReReplace{
Regex: regexp.MustCompile(regex),
Text: replace,
}
}
func NewReDate(dateStr string) ReReplace {
return ReReplace{
Regex: regexp.MustCompile(fmt.Sprintf(`%s: string`, dateStr)),
Text: fmt.Sprintf(`%s: Date | string`, dateStr),
}
}
func main() {
if len(os.Args) != 2 {
fmt.Println("Please provide a file path as an argument")
os.Exit(1)
}
path := os.Args[1]
fmt.Printf("Processing %s\n", path)
if _, err := os.Stat(path); os.IsNotExist(err) {
fmt.Printf("File %s does not exist\n", path)
os.Exit(1)
}
text := "/* post-processed by ./scripts/process-types.go */\n"
data, err := os.ReadFile(path)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
text += string(data)
replaces := [...]ReReplace{
NewReReplace(` Repo`, " "),
NewReReplace(` PaginationResultRepo`, " PaginationResult"),
NewReReplace(` Services`, " "),
NewReReplace(` V1`, " "),
NewReReplace(`\?:`, ":"),
NewReReplace(`(\w+):\s(.*null.*)`, "$1?: $2"), // make null union types optional
NewReDate("createdAt"),
NewReDate("updatedAt"),
NewReDate("soldTime"),
NewReDate("purchaseTime"),
NewReDate("warrantyExpires"),
NewReDate("expiresAt"),
NewReDate("date"),
NewReDate("completedDate"),
NewReDate("scheduledDate"),
}
for _, replace := range replaces {
fmt.Printf("Replacing '%v' -> '%s'\n", replace.Regex, replace.Text)
text = replace.Regex.ReplaceAllString(text, replace.Text)
}
err = os.WriteFile(path, []byte(text), 0644)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
os.Exit(0)
}
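The replacements are plain regex rewrites over the generated TypeScript definitions; for example, NewReDate("createdAt") turns `createdAt: string` into `createdAt: Date | string`. A tiny standalone check of that behaviour:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// The same pattern NewReDate builds for the "createdAt" field.
	re := regexp.MustCompile(`createdAt: string`)

	in := "createdAt: string;"
	out := re.ReplaceAllString(in, "createdAt: Date | string")

	fmt.Println(out) // createdAt: Date | string;
}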


@ -1,49 +1,77 @@
module github.com/hay-kot/homebox/backend
go 1.19
go 1.23.0
toolchain go1.24.2
require (
ariga.io/atlas v0.7.2-0.20220927111110-867ee0cca56a
entgo.io/ent v0.11.3
github.com/ardanlabs/conf/v2 v2.2.0
github.com/go-chi/chi/v5 v5.0.7
github.com/go-playground/validator/v10 v10.11.1
github.com/google/uuid v1.3.0
github.com/mattn/go-sqlite3 v1.14.16
github.com/rs/zerolog v1.28.0
github.com/stretchr/testify v1.8.1
github.com/swaggo/http-swagger v1.3.3
github.com/swaggo/swag v1.8.7
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90
ariga.io/atlas v0.32.0
entgo.io/ent v0.14.4
github.com/ardanlabs/conf/v3 v3.7.1
github.com/containrrr/shoutrrr v0.8.0
github.com/go-chi/chi/v5 v5.2.1
github.com/go-playground/validator/v10 v10.26.0
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1
github.com/google/uuid v1.6.0
github.com/gorilla/schema v1.4.1
github.com/hay-kot/httpkit v0.0.11
github.com/mattn/go-sqlite3 v1.14.27
github.com/olahol/melody v1.2.1
github.com/pkg/errors v0.9.1
github.com/rs/zerolog v1.34.0
github.com/stretchr/testify v1.10.0
github.com/swaggo/http-swagger/v2 v2.0.2
github.com/swaggo/swag v1.16.4
github.com/yeqown/go-qrcode/v2 v2.2.5
github.com/yeqown/go-qrcode/writer/standard v1.2.5
golang.org/x/crypto v0.37.0
modernc.org/sqlite v1.37.0
)
require (
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/bmatcuk/doublestar v1.3.4 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/go-openapi/inflect v0.19.0 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.20.0 // indirect
github.com/go-openapi/spec v0.20.7 // indirect
github.com/go-openapi/swag v0.22.3 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/hashicorp/hcl/v2 v2.14.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/fogleman/gg v1.3.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/go-openapi/inflect v0.21.2 // indirect
github.com/go-openapi/jsonpointer v0.21.1 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/swag v0.23.1 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/hashicorp/hcl/v2 v2.23.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.16 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mailru/easyjson v0.9.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a // indirect
github.com/zclconf/go-cty v1.11.0 // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
golang.org/x/net v0.0.0-20220923203811-8be639271d50 // indirect
golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8 // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/swaggo/files/v2 v2.0.2 // indirect
github.com/yeqown/reedsolomon v1.0.0 // indirect
github.com/zclconf/go-cty v1.16.2 // indirect
github.com/zclconf/go-cty-yaml v1.1.0 // indirect
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 // indirect
golang.org/x/image v0.26.0 // indirect
golang.org/x/mod v0.24.0 // indirect
golang.org/x/net v0.39.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sys v0.32.0 // indirect
golang.org/x/text v0.24.0 // indirect
golang.org/x/tools v0.32.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.62.1 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.9.1 // indirect
)


@ -1,151 +1,190 @@
ariga.io/atlas v0.7.2-0.20220927111110-867ee0cca56a h1:6/nt4DODfgxzHTTg3tYy7YkVzruGQGZ/kRvXpA45KUo=
ariga.io/atlas v0.7.2-0.20220927111110-867ee0cca56a/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE=
entgo.io/ent v0.11.3 h1:F5FBGAWiDCGder7YT+lqMnyzXl6d0xU3xMBM/SO3CMc=
entgo.io/ent v0.11.3/go.mod h1:mvDhvynOzAsOe7anH7ynPPtMjA/eeXP96kAfweevyxc=
ariga.io/atlas v0.32.0 h1:y+77nueMrExLiKlz1CcPKh/nU7VSlWfBbwCShsJyvCw=
ariga.io/atlas v0.32.0/go.mod h1:Oe1xWPuu5q9LzyrWfbZmEZxFYeu4BHTyzfjeW2aZp/w=
entgo.io/ent v0.14.4 h1:/DhDraSLXIkBhyiVoJeSshr4ZYi7femzhj6/TckzZuI=
entgo.io/ent v0.14.4/go.mod h1:aDPE/OziPEu8+OWbzy4UlvWmD2/kbRuWfK2A40hcxJM=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/ardanlabs/conf/v2 v2.2.0 h1:ar1+TYIYAh2Tdeg2DQroh7ruR56/vJR8BDfzDIrXgtk=
github.com/ardanlabs/conf/v2 v2.2.0/go.mod h1:m37ZKdW9jwMUEhGX36jRNt8VzSQ/HVmSziLZH2p33nY=
github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/ardanlabs/conf/v3 v3.7.1 h1:GIV7ylesF/0NexhnJdLmzsi2NIVYY2wVhR0UfvpmAeQ=
github.com/ardanlabs/conf/v3 v3.7.1/go.mod h1:IIucqD+601gt3jfhMXVukxoT16LnoGVd2DzRC2GhHiA=
github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0=
github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/containrrr/shoutrrr v0.8.0 h1:mfG2ATzIS7NR2Ec6XL+xyoHzN97H8WPjir8aYzJUSec=
github.com/containrrr/shoutrrr v0.8.0/go.mod h1:ioyQAyu1LJY6sILuNyKaQaw+9Ttik5QePU8atnAdO2o=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8=
github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA=
github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
github.com/go-openapi/spec v0.20.7 h1:1Rlu/ZrOCCob0n+JKKJAWhNWMPW8bOZRg8FJaY+0SKI=
github.com/go-openapi/spec v0.20.7/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g=
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ=
github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/go-chi/chi/v5 v5.2.1 h1:KOIHODQj58PmL80G2Eak4WdvUzjSJSm0vG72crDCqb8=
github.com/go-chi/chi/v5 v5.2.1/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-openapi/inflect v0.21.2 h1:0gClGlGcxifcJR56zwvhaOulnNgnhc4qTAkob5ObnSM=
github.com/go-openapi/inflect v0.21.2/go.mod h1:INezMuUu7SJQc2AyR3WO0DqqYUJSj8Kb4hBd7WtjlAw=
github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic=
github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk=
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU=
github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 h1:FWNFq4fM1wPfcK40yHE5UO3RUdSNPaBC+j3PokzA6OQ=
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/hashicorp/hcl/v2 v2.14.1 h1:x0BpjfZ+CYdbiz+8yZTQ+gdLO7IXvOut7Da+XJayx34=
github.com/hashicorp/hcl/v2 v2.14.1/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E=
github.com/gorilla/schema v1.4.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos=
github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA=
github.com/hay-kot/httpkit v0.0.11 h1:ZdB2uqsFBSDpfUoClGK5c5orjBjQkEVSXh7fZX5FKEk=
github.com/hay-kot/httpkit v0.0.11/go.mod h1:0kZdk5/swzdfqfg2c6pBWimcgeJ9PTyO97EbHnYl2Sw=
github.com/jarcoal/httpmock v1.3.0 h1:2RJ8GP0IIaWwcC9Fp2BmVi8Kog3v2Hn7VXM3fTd+nuc=
github.com/jarcoal/httpmock v1.3.0/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4=
github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.27 h1:drZCnuvf37yPfs95E5jd9s3XhdVWLal+6BOK6qrv6IU=
github.com/mattn/go-sqlite3 v1.14.27/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/olahol/melody v1.2.1 h1:xdwRkzHxf+B0w4TKbGpUSSkV516ZucQZJIWLztOWICQ=
github.com/olahol/melody v1.2.1/go.mod h1:GgkTl6Y7yWj/HtfD48Q5vLKPVoZOH+Qqgfa7CvJgJM4=
github.com/onsi/ginkgo/v2 v2.9.2 h1:BA2GMJOtfGAfagzYtrAlufIP0lq6QERkFmHLMLPwFSU=
github.com/onsi/ginkgo/v2 v2.9.2/go.mod h1:WHcJJG2dIlcCqVfBAwUCrJxSPFb6v4azBwgxeMeDuts=
github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE=
github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.28.0 h1:MirSo27VyNi7RJYP3078AA1+Cyzd2GB66qy3aUHvsWY=
github.com/rs/zerolog v1.28.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a h1:kAe4YSu0O0UFn1DowNo2MY5p6xzqtJ/wQ7LZynSvGaY=
github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w=
github.com/swaggo/http-swagger v1.3.3 h1:Hu5Z0L9ssyBLofaama21iYaF2VbWyA8jdohaaCGpHsc=
github.com/swaggo/http-swagger v1.3.3/go.mod h1:sE+4PjD89IxMPm77FnkDz0sdO+p5lbXzrVWT6OTVVGo=
github.com/swaggo/swag v1.8.7 h1:2K9ivTD3teEO+2fXV6zrZKDqk5IuU2aJtBDo8U7omWU=
github.com/swaggo/swag v1.8.7/go.mod h1:ezQVUUhly8dludpVk+/PuwJWvLLanB13ygV5Pr9enSk=
github.com/zclconf/go-cty v1.11.0 h1:726SxLdi2SDnjY+BStqB9J1hNp4+2WlzyXLuimibIe0=
github.com/zclconf/go-cty v1.11.0/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 h1:Y/gsMcFOcR+6S6f3YeMKl5g+dZMEWqcz5Czj/GWYbkM=
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220923203811-8be639271d50 h1:vKyz8L3zkd+xrMeIaBsQ/MNVPVFSffdaU3ZyYlBGFnI=
golang.org/x/net v0.0.0-20220923203811-8be639271d50/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/swaggo/files/v2 v2.0.2 h1:Bq4tgS/yxLB/3nwOMcul5oLEUKa877Ykgz3CJMVbQKU=
github.com/swaggo/files/v2 v2.0.2/go.mod h1:TVqetIzZsO9OhHX1Am9sRf9LdrFZqoK49N37KON/jr0=
github.com/swaggo/http-swagger/v2 v2.0.2 h1:FKCdLsl+sFCx60KFsyM0rDarwiUSZ8DqbfSyIKC9OBg=
github.com/swaggo/http-swagger/v2 v2.0.2/go.mod h1:r7/GBkAWIfK6E/OLnE8fXnviHiDeAHmgIyooa4xm3AQ=
github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A=
github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg=
github.com/yeqown/go-qrcode/v2 v2.2.5 h1:HCOe2bSjkhZyYoyyNaXNzh4DJZll6inVJQQw+8228Zk=
github.com/yeqown/go-qrcode/v2 v2.2.5/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw=
github.com/yeqown/go-qrcode/writer/standard v1.2.5 h1:m+5BUIcbsaG2md76FIqI/oZULrAju8tsk47eOohovQ0=
github.com/yeqown/go-qrcode/writer/standard v1.2.5/go.mod h1:O4MbzsotGCvy8upYPCR91j81dr5XLT7heuljcNXW+oQ=
github.com/yeqown/reedsolomon v1.0.0 h1:x1h/Ej/uJnNu8jaX7GLHBWmZKCAWjEJTetkqaabr4B0=
github.com/yeqown/reedsolomon v1.0.0/go.mod h1:P76zpcn2TCuL0ul1Fso373qHRc69LKwAw/Iy6g1WiiM=
github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70=
github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
github.com/zclconf/go-cty-yaml v1.1.0 h1:nP+jp0qPHv2IhUVqmQSzjvqAWcObN0KBkUl2rWBdig0=
github.com/zclconf/go-cty-yaml v1.1.0/go.mod h1:9YLUH4g7lOhVWqUbctnVlZ5KLpg7JAprQNgxSZ1Gyxs=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8 h1:h+EGohizhe9XlX18rfpa8k8RAc5XyaeamM+0VHRd4lc=
golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa h1:uKcci2q7Qtp6nMTC/AAvfNUAldFtJuHWV9/5QWiypts=
golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU=
golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s=
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
modernc.org/cc/v4 v4.25.2 h1:T2oH7sZdGvTaie0BRNFbIYsabzCxUQg8nLqCdQ2i0ic=
modernc.org/cc/v4 v4.25.2/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.25.1 h1:TFSzPrAGmDsdnhT9X2UrcPMI3N/mJ9/X9ykKXwLhDsU=
modernc.org/ccgo/v4 v4.25.1/go.mod h1:njjuAYiPflywOOrm3B7kCB444ONP5pAVr8PIEoE0uDw=
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/libc v1.62.1 h1:s0+fv5E3FymN8eJVmnk0llBe6rOxCu/DEU+XygRbS8s=
modernc.org/libc v1.62.1/go.mod h1:iXhATfJQLjG3NWy56a6WVU73lWOcdYVxsvwCgoPljuo=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.9.1 h1:V/Z1solwAVmMW1yttq3nDdZPJqV1rM05Ccq6KMSZ34g=
modernc.org/memory v1.9.1/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.37.0 h1:s1TMe7T3Q3ovQiK2Ouz4Jwh7dw4ZDqbebSDTlSJdfjI=
modernc.org/sqlite v1.37.0/go.mod h1:5YiWv+YviqGMuGw4V+PNplcyaJ5v+vQd7TQOgkACoJM=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=


@ -0,0 +1,104 @@
// Package currencies provides a shared definition of currencies. This uses a global
// variable to hold the currencies.
package currencies
import (
"bytes"
_ "embed"
"encoding/json"
"io"
"slices"
"strings"
"sync"
)
//go:embed currencies.json
var defaults []byte
type CollectorFunc func() ([]Currency, error)
func CollectJSON(reader io.Reader) CollectorFunc {
return func() ([]Currency, error) {
var currencies []Currency
err := json.NewDecoder(reader).Decode(&currencies)
if err != nil {
return nil, err
}
return currencies, nil
}
}
func CollectDefaults() CollectorFunc {
return CollectJSON(bytes.NewReader(defaults))
}
func CollectionCurrencies(collectors ...CollectorFunc) ([]Currency, error) {
out := make([]Currency, 0, len(collectors))
for i := range collectors {
c, err := collectors[i]()
if err != nil {
return nil, err
}
out = append(out, c...)
}
return out, nil
}
type Currency struct {
Name string `json:"name"`
Code string `json:"code"`
Local string `json:"local"`
Symbol string `json:"symbol"`
}
type CurrencyRegistry struct {
mu sync.RWMutex
registry map[string]Currency
}
func NewCurrencyService(currencies []Currency) *CurrencyRegistry {
registry := make(map[string]Currency, len(currencies))
for i := range currencies {
registry[currencies[i].Code] = currencies[i]
}
return &CurrencyRegistry{
registry: registry,
}
}
func (cs *CurrencyRegistry) Slice() []Currency {
cs.mu.RLock()
defer cs.mu.RUnlock()
out := make([]Currency, 0, len(cs.registry))
for key := range cs.registry {
out = append(out, cs.registry[key])
}
slices.SortFunc(out, func(a, b Currency) int {
if a.Name < b.Name {
return -1
}
if a.Name > b.Name {
return 1
}
return 0
})
return out
}
func (cs *CurrencyRegistry) IsSupported(code string) bool {
upper := strings.ToUpper(code)
cs.mu.RLock()
defer cs.mu.RUnlock()
_, ok := cs.registry[upper]
return ok
}
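For context, a minimal sketch of how this currencies package can be consumed: the embedded defaults are combined with an extra JSON source and loaded into a registry. The file name extra-currencies.json is hypothetical; the import path and functions are the ones introduced above.

package main

import (
	"os"

	"github.com/hay-kot/homebox/backend/internal/core/currencies"
)

func main() {
	// extra-currencies.json is a hypothetical file using the same schema as currencies.json.
	f, err := os.Open("extra-currencies.json")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Defaults first, then user-provided entries appended in order.
	all, err := currencies.CollectionCurrencies(
		currencies.CollectDefaults(),
		currencies.CollectJSON(f),
	)
	if err != nil {
		panic(err)
	}

	registry := currencies.NewCurrencyService(all)
	_ = registry.IsSupported("usd") // true: IsSupported upper-cases the code before lookup
}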


@ -0,0 +1,638 @@
[
{
"code": "USD",
"local": "United States",
"symbol": "$",
"name": "United States Dollar"
},
{
"code": "AED",
"local": "United Arab Emirates",
"symbol": "د.إ",
"name": "United Arab Emirates Dirham"
},
{
"code": "AFN",
"local": "Afghanistan",
"symbol": "؋",
"name": "Afghan Afghani"
},
{
"code": "ALL",
"local": "Albania",
"symbol": "L",
"name": "Albanian Lek"
},
{
"code": "AMD",
"local": "Armenia",
"symbol": "֏",
"name": "Armenian Dram"
},
{
"code": "ANG",
"local": "Netherlands Antilles",
"symbol": "ƒ",
"name": "Netherlands Antillean Guilder"
},
{
"code": "AOA",
"local": "Angola",
"symbol": "Kz",
"name": "Angolan Kwanza"
},
{
"code": "ARS",
"local": "Argentina",
"symbol": "$",
"name": "Argentine Peso"
},
{
"code": "AUD",
"local": "Australia",
"symbol": "A$",
"name": "Australian Dollar"
},
{
"code": "AWG",
"local": "Aruba",
"symbol": "ƒ",
"name": "Aruban Florin"
},
{
"code": "AZN",
"local": "Azerbaijan",
"symbol": "₼",
"name": "Azerbaijani Manat"
},
{
"code": "BAM",
"local": "Bosnia and Herzegovina",
"symbol": "KM",
"name": "Bosnia and Herzegovina Convertible Mark"
},
{
"code": "BBD",
"local": "Barbados",
"symbol": "Bds$",
"name": "Barbadian Dollar"
},
{
"code": "BDT",
"local": "Bangladesh",
"symbol": "৳",
"name": "Bangladeshi Taka"
},
{
"code": "BGN",
"local": "Bulgaria",
"symbol": "лв",
"name": "Bulgarian lev"
},
{
"code": "BHD",
"local": "Bahrain",
"symbol": "ب.د",
"name": "Bahraini Dinar"
},
{
"code": "BIF",
"local": "Burundi",
"symbol": "FBu",
"name": "Burundian Franc"
},
{
"code": "BMD",
"local": "Bermuda",
"symbol": "BD$",
"name": "Bermudian Dollar"
},
{
"code": "BND",
"local": "Brunei",
"symbol": "B$",
"name": "Brunei Dollar"
},
{
"code": "BOB",
"local": "Bolivia",
"symbol": "Bs.",
"name": "Bolivian Boliviano"
},
{
"code": "BRL",
"local": "Brazil",
"symbol": "R$",
"name": "Brazilian Real"
},
{
"code": "BSD",
"local": "Bahamas",
"symbol": "B$",
"name": "Bahamian Dollar"
},
{
"code": "BTN",
"local": "Bhutan",
"symbol": "Nu.",
"name": "Bhutanese Ngultrum"
},
{
"code": "BWP",
"local": "Botswana",
"symbol": "P",
"name": "Botswana Pula"
},
{
"code": "BYN",
"local": "Belarus",
"symbol": "Br",
"name": "Belarusian Ruble"
},
{
"code": "BZD",
"local": "Belize",
"symbol": "BZ$",
"name": "Belize Dollar"
},
{
"code": "CAD",
"local": "Canada",
"symbol": "C$",
"name": "Canadian Dollar"
},
{
"code": "CDF",
"local": "Democratic Republic of the Congo",
"symbol": "FC",
"name": "Congolese Franc"
},
{
"code": "CHF",
"local": "Switzerland",
"symbol": "CHF",
"name": "Swiss Franc"
},
{
"code": "CLP",
"local": "Chile",
"symbol": "CL$",
"name": "Chilean Peso"
},
{
"code": "CNY",
"local": "China",
"symbol": "¥",
"name": "Chinese Yuan"
},
{
"code": "COP",
"local": "Colombia",
"symbol": "COL$",
"name": "Colombian Peso"
},
{
"code": "CRC",
"local": "Costa Rica",
"symbol": "₡",
"name": "Costa Rican Colón"
},
{
"code": "CUP",
"local": "Cuba",
"symbol": "₱",
"name": "Cuban Peso"
},
{
"code": "CVE",
"local": "Cape Verde",
"symbol": "$",
"name": "Cape Verdean Escudo"
},
{
"code": "CZK",
"local": "Czech Republic",
"symbol": "Kč",
"name": "Czech Koruna"
},
{
"code": "DJF",
"local": "Djibouti",
"symbol": "Fdj",
"name": "Djiboutian Franc"
},
{
"code": "DKK",
"local": "Denmark",
"symbol": "kr",
"name": "Danish Krone"
},
{
"code": "DOP",
"local": "Dominican Republic",
"symbol": "RD$",
"name": "Dominican Peso"
},
{
"code": "DZD",
"local": "Algeria",
"symbol": "د.ج",
"name": "Algerian Dinar"
},
{
"code": "EGP",
"local": "Egypt",
"symbol": "£",
"name": "Egyptian Pound"
},
{
"code": "ERN",
"local": "Eritrea",
"symbol": "Nfk",
"name": "Eritrean Nakfa"
},
{
"code": "ETB",
"local": "Ethiopia",
"symbol": "Br",
"name": "Ethiopian Birr"
},
{
"code": "EUR",
"local": "Eurozone",
"symbol": "€",
"name": "Euro"
},
{
"code": "FJD",
"local": "Fiji",
"symbol": "FJ$",
"name": "Fijian Dollar"
},
{
"code": "FKP",
"local": "Falkland Islands",
"symbol": "£",
"name": "Falkland Islands Pound"
},
{
"code": "FOK",
"local": "Faroe Islands",
"symbol": "kr",
"name": "Faroese Króna"
},
{
"code": "GBP",
"local": "United Kingdom",
"symbol": "£",
"name": "British Pound Sterling"
},
{
"code": "GEL",
"local": "Georgia",
"symbol": "₾",
"name": "Georgian Lari"
},
{
"code": "GGP",
"local": "Guernsey",
"symbol": "£",
"name": "Guernsey Pound"
},
{
"code": "GHS",
"local": "Ghana",
"symbol": "GH₵",
"name": "Ghanaian Cedi"
},
{
"code": "GIP",
"local": "Gibraltar",
"symbol": "£",
"name": "Gibraltar Pound"
},
{
"code": "GMD",
"local": "Gambia",
"symbol": "D",
"name": "Gambian Dalasi"
},
{
"code": "GNF",
"local": "Guinea",
"symbol": "FG",
"name": "Guinean Franc"
},
{
"code": "GTQ",
"local": "Guatemala",
"symbol": "Q",
"name": "Guatemalan Quetzal"
},
{
"code": "GYD",
"local": "Guyana",
"symbol": "GY$",
"name": "Guyanese Dollar"
},
{
"code": "HKD",
"local": "Hong Kong",
"symbol": "HK$",
"name": "Hong Kong Dollar"
},
{
"code": "HNL",
"local": "Honduras",
"symbol": "L",
"name": "Honduran Lempira"
},
{
"code": "HRK",
"local": "Croatia",
"symbol": "kn",
"name": "Croatian Kuna"
},
{
"code": "HTG",
"local": "Haiti",
"symbol": "G",
"name": "Haitian Gourde"
},
{
"code": "HUF",
"local": "Hungary",
"symbol": "Ft",
"name": "Hungarian Forint"
},
{
"code": "IDR",
"local": "Indonesia",
"symbol": "Rp",
"name": "Indonesian Rupiah"
},
{
"code": "ILS",
"local": "Israel",
"symbol": "₪",
"name": "Israeli New Shekel"
},
{
"code": "IMP",
"local": "Isle of Man",
"symbol": "£",
"name": "Manx Pound"
},
{
"code": "INR",
"local": "India",
"symbol": "₹",
"name": "Indian Rupee"
},
{
"code": "IQD",
"local": "Iraq",
"symbol": "ع.د",
"name": "Iraqi Dinar"
},
{
"code": "IRR",
"local": "Iran",
"symbol": "﷼",
"name": "Iranian Rial"
},
{
"code": "ISK",
"local": "Iceland",
"symbol": "kr",
"name": "Icelandic Króna"
},
{
"code": "JEP",
"local": "Jersey",
"symbol": "£",
"name": "Jersey Pound"
},
{
"code": "JMD",
"local": "Jamaica",
"symbol": "J$",
"name": "Jamaican Dollar"
},
{
"code": "JOD",
"local": "Jordan",
"symbol": "د.ا",
"name": "Jordanian Dinar"
},
{
"code": "JPY",
"local": "Japan",
"symbol": "¥",
"name": "Japanese Yen"
},
{
"code": "KES",
"local": "Kenya",
"symbol": "KSh",
"name": "Kenyan Shilling"
},
{
"code": "KGS",
"local": "Kyrgyzstan",
"symbol": "с",
"name": "Kyrgyzstani Som"
},
{
"code": "KHR",
"local": "Cambodia",
"symbol": "៛",
"name": "Cambodian Riel"
},
{
"code": "KID",
"local": "Kiribati",
"symbol": "$",
"name": "Kiribati Dollar"
},
{
"code": "KMF",
"local": "Comoros",
"symbol": "CF",
"name": "Comorian Franc"
},
{
"code": "KRW",
"local": "South Korea",
"symbol": "₩",
"name": "South Korean Won"
},
{
"code": "KWD",
"local": "Kuwait",
"symbol": "د.ك",
"name": "Kuwaiti Dinar"
},
{
"code": "KYD",
"local": "Cayman Islands",
"symbol": "CI$",
"name": "Cayman Islands Dollar"
},
{
"code": "KZT",
"local": "Kazakhstan",
"symbol": "₸",
"name": "Kazakhstani Tenge"
},
{
"code": "LAK",
"local": "Laos",
"symbol": "₭",
"name": "Lao Kip"
},
{
"code": "LBP",
"local": "Lebanon",
"symbol": "ل.ل",
"name": "Lebanese Pound"
},
{
"code": "LKR",
"local": "Sri Lanka",
"symbol": "₨",
"name": "Sri Lankan Rupee"
},
{
"code": "LRD",
"local": "Liberia",
"symbol": "L$",
"name": "Liberian Dollar"
},
{
"code": "LSL",
"local": "Lesotho",
"symbol": "M",
"name": "Lesotho Loti"
},
{
"code": "LYD",
"local": "Libya",
"symbol": "ل.د",
"name": "Libyan Dinar"
},
{
"code": "MAD",
"local": "Morocco",
"symbol": "د.م.",
"name": "Moroccan Dirham"
},
{
"code": "MDL",
"local": "Moldova",
"symbol": "lei",
"name": "Moldovan Leu"
},
{
"code": "MGA",
"local": "Madagascar",
"symbol": "Ar",
"name": "Malagasy Ariary"
},
{
"code": "MKD",
"local": "North Macedonia",
"symbol": "ден",
"name": "Macedonian Denar"
},
{
"code": "MMK",
"local": "Myanmar",
"symbol": "K",
"name": "Myanmar Kyat"
},
{
"code": "MNT",
"local": "Mongolia",
"symbol": "₮",
"name": "Mongolian Tugrik"
},
{
"code": "MOP",
"local": "Macau",
"symbol": "MOP$",
"name": "Macanese Pataca"
},
{
"code": "MRU",
"local": "Mauritania",
"symbol": "UM",
"name": "Mauritanian Ouguiya"
},
{
"code": "MUR",
"local": "Mauritius",
"symbol": "₨",
"name": "Mauritian Rupee"
},
{
"code": "MVR",
"local": "Maldives",
"symbol": "Rf",
"name": "Maldivian Rufiyaa"
},
{
"code": "MWK",
"local": "Malawi",
"symbol": "MK",
"name": "Malawian Kwacha"
},
{
"code": "MXN",
"local": "Mexico",
"symbol": "Mex$",
"name": "Mexican Peso"
},
{
"code": "MYR",
"local": "Malaysia",
"symbol": "RM",
"name": "Malaysian Ringgit"
},
{
"code": "MZN",
"local": "Mozambique",
"symbol": "MT",
"name": "Mozambican Metical"
},
{
"code": "NAD",
"local": "Namibia",
"symbol": "N$",
"name": "Namibian Dollar"
},
{
"code": "NGN",
"local": "Nigeria",
"symbol": "₦",
"name": "Nigerian Naira"
},
{
"code": "NIO",
"local": "Nicaragua",
"symbol": "C$",
"name": "Nicaraguan Córdoba"
},
{
"code": "NOK",
"local": "Norway",
"symbol": "kr",
"name": "Norwegian Krone"
},
{
"code": "UAH",
"local": "Ukraine",
"symbol": "₴",
"name": "Ukrainian Hryvnia"
}
]


@ -1,24 +1,67 @@
// Package services provides the core business logic for the application.
package services
import "github.com/hay-kot/homebox/backend/internal/data/repo"
import (
"github.com/hay-kot/homebox/backend/internal/core/currencies"
"github.com/hay-kot/homebox/backend/internal/data/repo"
)
type AllServices struct {
User *UserService
Group *GroupService
Items *ItemService
User *UserService
Group *GroupService
Items *ItemService
BackgroundService *BackgroundService
Currencies *currencies.CurrencyRegistry
}
func New(repos *repo.AllRepos) *AllServices {
type OptionsFunc func(*options)
type options struct {
autoIncrementAssetID bool
currencies []currencies.Currency
}
func WithAutoIncrementAssetID(v bool) func(*options) {
return func(o *options) {
o.autoIncrementAssetID = v
}
}
func WithCurrencies(v []currencies.Currency) func(*options) {
return func(o *options) {
o.currencies = v
}
}
func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
if repos == nil {
panic("repos cannot be nil")
}
defaultCurrencies, err := currencies.CollectionCurrencies(
currencies.CollectDefaults(),
)
if err != nil {
panic("failed to collect default currencies")
}
options := &options{
autoIncrementAssetID: true,
currencies: defaultCurrencies,
}
for _, opt := range opts {
opt(options)
}
return &AllServices{
User: &UserService{repos},
Group: &GroupService{repos},
Items: &ItemService{
repo: repos,
at: attachmentTokens{},
repo: repos,
autoIncrementAssetID: options.autoIncrementAssetID,
},
BackgroundService: &BackgroundService{repos},
Currencies: currencies.NewCurrencyService(options.currencies),
}
}
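A short sketch of how these functional options are intended to be passed to New. The wrapper newServices is hypothetical, the services import path is assumed to be internal/core/services (as the reporting subpackage path suggests), and the repo wiring (ent client, event bus, data directory) is shown in the test changes below.

package main

import (
	"github.com/hay-kot/homebox/backend/internal/core/currencies"
	"github.com/hay-kot/homebox/backend/internal/core/services"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

// newServices is a hypothetical constructor wrapper. Passing WithCurrencies replaces the
// embedded defaults entirely, so callers who want defaults plus extras should merge them
// first via currencies.CollectionCurrencies.
func newServices(repos *repo.AllRepos, extra []currencies.Currency) *services.AllServices {
	return services.New(
		repos,
		services.WithAutoIncrementAssetID(false),
		services.WithCurrencies(extra),
	)
}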


@ -3,11 +3,11 @@ package services
import (
"context"
"log"
"math/rand"
"os"
"testing"
"time"
"github.com/hay-kot/homebox/backend/internal/core/currencies"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/faker"
@ -15,7 +15,8 @@ import (
)
var (
fk = faker.NewFaker()
fk = faker.NewFaker()
tbus = eventbus.New()
tCtx = Context{}
tClient *ent.Client
@ -49,8 +50,6 @@ func bootstrap() {
}
func TestMain(m *testing.M) {
rand.Seed(int64(time.Now().Unix()))
client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
if err != nil {
log.Fatalf("failed opening connection to sqlite: %v", err)
@ -62,9 +61,14 @@ func TestMain(m *testing.M) {
}
tClient = client
tRepos = repo.New(tClient, os.TempDir()+"/homebox")
tSvc = New(tRepos)
defer client.Close()
tRepos = repo.New(tClient, tbus, os.TempDir()+"/homebox")
defaults, _ := currencies.CollectionCurrencies(
currencies.CollectDefaults(),
)
tSvc = New(tRepos, WithCurrencies(defaults))
defer func() { _ = client.Close() }()
bootstrap()
tCtx = Context{


@ -0,0 +1,5 @@
HB.location,HB.name,HB.quantity,HB.description,HB.field.Custom Field 1,HB.field.Custom Field 2,HB.field.Custom Field 3
loc,Item 1,1,Description 1,Value 1[1],Value 1[2],Value 1[3]
loc,Item 2,2,Description 2,Value 2[1],Value 2[2],Value 2[3]
loc,Item 3,3,Description 3,Value 3[1],Value 3[2],Value 3[3]


@ -0,0 +1,4 @@
HB.location,HB.name,HB.quantity,HB.description
loc,Item 1,1,Description 1
loc,Item 2,2,Description 2
loc,Item 3,3,Description 3


@ -0,0 +1,4 @@
HB.name,HB.asset_id,HB.location,HB.labels
Item 1,1,Path / To / Location 1,L1 ; L2 ; L3
Item 2,000-002,Path /To/ Location 2,L1;L2;L3
Item 3,1000-003,Path / To /Location 3 , L1;L2; L3


@ -0,0 +1,42 @@
package reporting
import (
"github.com/gocarina/gocsv"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)
// =================================================================================================
type BillOfMaterialsEntry struct {
PurchaseDate types.Date `csv:"Purchase Date"`
Name string `csv:"Name"`
Description string `csv:"Description"`
Manufacturer string `csv:"Manufacturer"`
SerialNumber string `csv:"Serial Number"`
ModelNumber string `csv:"Model Number"`
Quantity int `csv:"Quantity"`
Price float64 `csv:"Price"`
TotalPrice float64 `csv:"Total Price"`
}
// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for the given items in TSV format.
// See BillOfMaterialsEntry for the format of the output.
func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
bomEntries := make([]BillOfMaterialsEntry, len(entities))
for i, entity := range entities {
bomEntries[i] = BillOfMaterialsEntry{
PurchaseDate: entity.PurchaseTime,
Name: entity.Name,
Description: entity.Description,
Manufacturer: entity.Manufacturer,
SerialNumber: entity.SerialNumber,
ModelNumber: entity.ModelNumber,
Quantity: entity.Quantity,
Price: entity.PurchasePrice,
TotalPrice: entity.PurchasePrice * float64(entity.Quantity),
}
}
return gocsv.MarshalBytes(&bomEntries)
}
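A small usage sketch: writing the generated bill of materials to disk. The helper writeBOM and the output file name are hypothetical; in real code the items slice would come from the items repository for a group.

package main

import (
	"os"

	"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

// writeBOM marshals the items into the BillOfMaterialsEntry layout and writes the result to disk.
func writeBOM(items []repo.ItemOut) error {
	data, err := reporting.BillOfMaterialsTSV(items)
	if err != nil {
		return err
	}
	return os.WriteFile("bill-of-materials.tsv", data, 0o644)
}

func main() {
	// With an empty slice this writes only the header row.
	if err := writeBOM(nil); err != nil {
		panic(err)
	}
}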


@ -0,0 +1,91 @@
// Package eventbus provides an interface for an event bus.
package eventbus
import (
"context"
"sync"
"github.com/google/uuid"
)
type Event string
const (
EventLabelMutation Event = "label.mutation"
EventLocationMutation Event = "location.mutation"
EventItemMutation Event = "item.mutation"
)
type GroupMutationEvent struct {
GID uuid.UUID
}
type eventData struct {
event Event
data any
}
type EventBus struct {
started bool
ch chan eventData
mu sync.RWMutex
subscribers map[Event][]func(any)
}
func New() *EventBus {
return &EventBus{
ch: make(chan eventData, 100),
subscribers: map[Event][]func(any){
EventLabelMutation: {},
EventLocationMutation: {},
EventItemMutation: {},
},
}
}
func (e *EventBus) Run(ctx context.Context) error {
if e.started {
panic("event bus already started")
}
e.started = true
for {
select {
case <-ctx.Done():
return nil
case event := <-e.ch:
e.mu.RLock()
arr, ok := e.subscribers[event.event]
e.mu.RUnlock()
if !ok {
continue
}
for _, fn := range arr {
fn(event.data)
}
}
}
}
func (e *EventBus) Publish(event Event, data any) {
e.ch <- eventData{
event: event,
data: data,
}
}
func (e *EventBus) Subscribe(event Event, fn func(any)) {
e.mu.Lock()
defer e.mu.Unlock()
arr, ok := e.subscribers[event]
if !ok {
panic("event not found")
}
e.subscribers[event] = append(arr, fn)
}
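To illustrate the intended flow (Subscribe before Run, then Publish), here is a self-contained sketch. The sleep exists only so the asynchronous handler can fire before this toy program exits; it is not part of the real wiring.

package main

import (
	"context"
	"log"
	"time"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
)

func main() {
	bus := eventbus.New()

	// Handlers receive the payload passed to Publish; here that is a GroupMutationEvent.
	bus.Subscribe(eventbus.EventItemMutation, func(data any) {
		if evt, ok := data.(eventbus.GroupMutationEvent); ok {
			log.Printf("item mutated in group %s", evt.GID)
		}
	})

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	go func() { _ = bus.Run(ctx) }()

	bus.Publish(eventbus.EventItemMutation, eventbus.GroupMutationEvent{GID: uuid.New()})

	// Give the bus goroutine a moment to deliver the event (sketch only).
	time.Sleep(100 * time.Millisecond)
}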


@ -0,0 +1,94 @@
// Package reporting provides a way to import CSV files into the database.
package reporting
import (
"bytes"
"encoding/csv"
"errors"
"io"
"strings"
)
var (
ErrNoHomeboxHeaders = errors.New("no headers found")
ErrMissingRequiredHeaders = errors.New("missing required headers `HB.location` or `HB.name`")
)
// determineSeparator determines the separator used in the CSV file
// It returns the separator as a rune and an error if it could not be determined
//
// It is assumed that the first row is the header row and that the separator is the same
// for all rows.
//
// Supported separators are `,` and `\t`
func determineSeparator(data []byte) (rune, error) {
// First row
firstRow := bytes.Split(data, []byte("\n"))[0]
// find first comma or /t
comma := bytes.IndexByte(firstRow, ',')
tab := bytes.IndexByte(firstRow, '\t')
switch {
case comma == -1 && tab == -1:
return 0, errors.New("could not determine separator")
case tab > comma:
return '\t', nil
default:
return ',', nil
}
}
// readRawCsv reads a CSV file and returns the raw data as a 2D string array
// It determines the separator used in the CSV file and returns an error if
// it could not be determined
func readRawCsv(r io.Reader) ([][]string, error) {
data, err := io.ReadAll(r)
if err != nil {
return nil, err
}
reader := csv.NewReader(bytes.NewReader(data))
// Determine separator
sep, err := determineSeparator(data)
if err != nil {
return nil, err
}
reader.Comma = sep
return reader.ReadAll()
}
// parseHeaders parses the homebox headers from the CSV file and returns a map of the headers
// and their column index as well as a list of the field headers (HB.field.*) in the order
// they appear in the CSV file
//
// It returns an error if no homebox headers are found
func parseHeaders(headers []string) (hbHeaders map[string]int, fieldHeaders []string, err error) {
hbHeaders = map[string]int{} // initialize map
for col, h := range headers {
if strings.HasPrefix(h, "HB.field.") {
fieldHeaders = append(fieldHeaders, h)
}
if strings.HasPrefix(h, "HB.") {
hbHeaders[h] = col
}
}
required := []string{"HB.location", "HB.name"}
for _, h := range required {
if _, ok := hbHeaders[h]; !ok {
return nil, nil, ErrMissingRequiredHeaders
}
}
if len(hbHeaders) == 0 {
return nil, nil, ErrNoHomeboxHeaders
}
return hbHeaders, fieldHeaders, nil
}


@ -0,0 +1,95 @@
package reporting
import (
"strings"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)
type ExportItemFields struct {
Name string
Value string
}
type ExportTSVRow struct {
ImportRef string `csv:"HB.import_ref"`
Location LocationString `csv:"HB.location"`
LabelStr LabelString `csv:"HB.labels"`
AssetID repo.AssetID `csv:"HB.asset_id"`
Archived bool `csv:"HB.archived"`
Name string `csv:"HB.name"`
Quantity int `csv:"HB.quantity"`
Description string `csv:"HB.description"`
Insured bool `csv:"HB.insured"`
Notes string `csv:"HB.notes"`
PurchasePrice float64 `csv:"HB.purchase_price"`
PurchaseFrom string `csv:"HB.purchase_from"`
PurchaseTime types.Date `csv:"HB.purchase_time"`
Manufacturer string `csv:"HB.manufacturer"`
ModelNumber string `csv:"HB.model_number"`
SerialNumber string `csv:"HB.serial_number"`
LifetimeWarranty bool `csv:"HB.lifetime_warranty"`
WarrantyExpires types.Date `csv:"HB.warranty_expires"`
WarrantyDetails string `csv:"HB.warranty_details"`
SoldTo string `csv:"HB.sold_to"`
SoldPrice float64 `csv:"HB.sold_price"`
SoldTime types.Date `csv:"HB.sold_time"`
SoldNotes string `csv:"HB.sold_notes"`
Fields []ExportItemFields `csv:"-"`
}
// ============================================================================
// LabelString is a string slice that is used to represent a list of labels.
//
// For example, a list of labels "Important; Work" would be represented as a
// LabelString with the following values:
//
// LabelString{"Important", "Work"}
type LabelString []string
func parseLabelString(s string) LabelString {
v, _ := parseSeparatedString(s, ";")
return v
}
func (ls LabelString) String() string {
return strings.Join(ls, "; ")
}
// ============================================================================
// LocationString is a string slice that is used to represent a location
// hierarchy.
//
// For example, a location hierarchy of "Home / Bedroom / Desk" would be
// represented as a LocationString with the following values:
//
// LocationString{"Home", "Bedroom", "Desk"}
type LocationString []string
func parseLocationString(s string) LocationString {
v, _ := parseSeparatedString(s, "/")
return v
}
func (csf LocationString) String() string {
return strings.Join(csf, " / ")
}
func fromPathSlice(s []repo.ItemPath) LocationString {
v := make(LocationString, len(s))
for i := range s {
v[i] = s[i].Name
}
return v
}
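The round trip described in the comments above, written as an in-package example (the parse helpers are unexported, so a snippet like this would have to live alongside the file as an example test):

package reporting

import "fmt"

func Example_labelAndLocationStrings() {
	labels := parseLabelString("Important; Work")
	location := parseLocationString("Home / Bedroom / Desk")

	fmt.Println(labels.String())
	fmt.Println(location.String())
	// Output:
	// Important; Work
	// Home / Bedroom / Desk
}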


@ -0,0 +1,322 @@
package reporting
import (
"context"
"fmt"
"io"
"reflect"
"sort"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
"github.com/rs/zerolog/log"
)
// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
// the standard format of the file.
//
// See ExportTSVRow for the format of the data in the sheet.
type IOSheet struct {
headers []string
custom []int
index map[string]int
Rows []ExportTSVRow
}
func (s *IOSheet) indexHeaders() {
s.index = make(map[string]int)
for i, h := range s.headers {
if strings.HasPrefix(h, "HB.field") {
s.custom = append(s.custom, i)
}
if strings.HasPrefix(h, "HB.") {
s.index[h] = i
}
}
}
func (s *IOSheet) GetColumn(str string) (col int, ok bool) {
if s.index == nil {
s.indexHeaders()
}
col, ok = s.index[str]
return
}
// Read reads a CSV/TSV and populates the "Rows" field with the data from the sheet.
// Custom Fields are supported via the `HB.field.*` headers; the "Name" of the field is
// the part after the `HB.field.` prefix. Additionally, Custom Fields with no value
// (including empty strings) are excluded from the row.Fields slice.
//
// Note that
// - the first row is assumed to be the header
// - at least 1 row of data is required
// - rows and columns must be rectangular (i.e. all rows must have the same number of columns)
func (s *IOSheet) Read(data io.Reader) error {
sheet, err := readRawCsv(data)
if err != nil {
return err
}
if len(sheet) < 2 {
return fmt.Errorf("sheet must have at least 1 row of data (header + 1)")
}
s.headers = sheet[0]
s.Rows = make([]ExportTSVRow, len(sheet)-1)
for i, row := range sheet[1:] {
if len(row) != len(s.headers) {
return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
}
rowData := ExportTSVRow{}
st := reflect.TypeOf(ExportTSVRow{})
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
tag := field.Tag.Get("csv")
if tag == "" || tag == "-" {
continue
}
col, ok := s.GetColumn(tag)
if !ok {
continue
}
val := row[col]
var v interface{}
switch field.Type {
case reflect.TypeOf(""):
v = val
case reflect.TypeOf(int(0)):
v = parseInt(val)
case reflect.TypeOf(bool(false)):
v = parseBool(val)
case reflect.TypeOf(float64(0)):
v = parseFloat(val)
// Custom Types
case reflect.TypeOf(types.Date{}):
v = types.DateFromString(val)
case reflect.TypeOf(repo.AssetID(0)):
v, _ = repo.ParseAssetID(val)
case reflect.TypeOf(LocationString{}):
v = parseLocationString(val)
case reflect.TypeOf(LabelString{}):
v = parseLabelString(val)
}
log.Debug().
Str("tag", tag).
Interface("val", v).
Str("type", fmt.Sprintf("%T", v)).
Msg("parsed value")
// Nil values are not allowed at the moment. This may change.
if v == nil {
return fmt.Errorf("could not convert %q to %s", val, field.Type)
}
ptrField := reflect.ValueOf(&rowData).Elem().Field(i)
ptrField.Set(reflect.ValueOf(v))
}
for _, col := range s.custom {
colName := strings.TrimPrefix(s.headers[col], "HB.field.")
customVal := row[col]
if customVal == "" {
continue
}
rowData.Fields = append(rowData.Fields, ExportItemFields{
Name: colName,
Value: customVal,
})
}
s.Rows[i] = rowData
}
return nil
}
// ReadItems populates the sheet's Rows and headers from the given items.
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos) error {
s.Rows = make([]ExportTSVRow, len(items))
extraHeaders := map[string]struct{}{}
for i := range items {
item := items[i]
// TODO: Support fetching nested locations
locID := item.Location.ID
locPaths, err := repos.Locations.PathForLoc(context.Background(), GID, locID)
if err != nil {
log.Error().Err(err).Msg("could not get location path")
return err
}
locString := fromPathSlice(locPaths)
labelString := make([]string, len(item.Labels))
for i, l := range item.Labels {
labelString[i] = l.Name
}
customFields := make([]ExportItemFields, len(item.Fields))
for i, f := range item.Fields {
extraHeaders[f.Name] = struct{}{}
customFields[i] = ExportItemFields{
Name: f.Name,
Value: f.TextValue,
}
}
s.Rows[i] = ExportTSVRow{
// fill struct
Location: locString,
LabelStr: labelString,
ImportRef: item.ImportRef,
AssetID: item.AssetID,
Name: item.Name,
Quantity: item.Quantity,
Description: item.Description,
Insured: item.Insured,
Archived: item.Archived,
PurchasePrice: item.PurchasePrice,
PurchaseFrom: item.PurchaseFrom,
PurchaseTime: item.PurchaseTime,
Manufacturer: item.Manufacturer,
ModelNumber: item.ModelNumber,
SerialNumber: item.SerialNumber,
LifetimeWarranty: item.LifetimeWarranty,
WarrantyExpires: item.WarrantyExpires,
WarrantyDetails: item.WarrantyDetails,
SoldTo: item.SoldTo,
SoldTime: item.SoldTime,
SoldPrice: item.SoldPrice,
SoldNotes: item.SoldNotes,
Fields: customFields,
}
}
// Extract and sort additional headers for deterministic output
customHeaders := make([]string, 0, len(extraHeaders))
for k := range extraHeaders {
customHeaders = append(customHeaders, k)
}
sort.Strings(customHeaders)
st := reflect.TypeOf(ExportTSVRow{})
// Write headers
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
tag := field.Tag.Get("csv")
if tag == "" || tag == "-" {
continue
}
s.headers = append(s.headers, tag)
}
for _, h := range customHeaders {
s.headers = append(s.headers, "HB.field."+h)
}
return nil
}
// TSV returns the current sheet as a 2D string slice, ready to be written out in TSV format.
func (s *IOSheet) TSV() ([][]string, error) {
memcsv := make([][]string, len(s.Rows)+1)
memcsv[0] = s.headers
// use struct tags in rows to determine column order
for i, row := range s.Rows {
rowIdx := i + 1
memcsv[rowIdx] = make([]string, len(s.headers))
st := reflect.TypeOf(row)
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
tag := field.Tag.Get("csv")
if tag == "" || tag == "-" {
continue
}
col, ok := s.GetColumn(tag)
if !ok {
continue
}
val := reflect.ValueOf(row).Field(i)
var v string
switch field.Type {
case reflect.TypeOf(""):
v = val.String()
case reflect.TypeOf(int(0)):
v = strconv.Itoa(int(val.Int()))
case reflect.TypeOf(bool(false)):
v = strconv.FormatBool(val.Bool())
case reflect.TypeOf(float64(0)):
v = strconv.FormatFloat(val.Float(), 'f', -1, 64)
// Custom Types
case reflect.TypeOf(types.Date{}):
v = val.Interface().(types.Date).String()
case reflect.TypeOf(repo.AssetID(0)):
v = val.Interface().(repo.AssetID).String()
case reflect.TypeOf(LocationString{}):
v = val.Interface().(LocationString).String()
case reflect.TypeOf(LabelString{}):
v = val.Interface().(LabelString).String()
default:
log.Debug().Str("type", field.Type.String()).Msg("unknown type")
}
memcsv[rowIdx][col] = v
}
for _, f := range row.Fields {
col, ok := s.GetColumn("HB.field." + f.Name)
if !ok {
continue
}
memcsv[i+1][col] = f.Value
}
}
return memcsv, nil
}
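Putting Read and TSV together, a minimal sketch that parses the same shape of data as the minimal.csv fixture above and prints it back out as tab-separated rows (the inline CSV literal is illustrative):

package main

import (
	"fmt"
	"strings"

	"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
)

func main() {
	csvData := "HB.location,HB.name,HB.quantity,HB.description\nloc,Item 1,1,Description 1\n"

	sheet := reporting.IOSheet{}
	if err := sheet.Read(strings.NewReader(csvData)); err != nil {
		panic(err)
	}

	rows, err := sheet.TSV()
	if err != nil {
		panic(err)
	}
	for _, row := range rows {
		fmt.Println(strings.Join(row, "\t"))
	}
}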


@ -0,0 +1,221 @@
package reporting
import (
"bytes"
"reflect"
"testing"
_ "embed"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
var (
//go:embed .testdata/import/minimal.csv
minimalImportCSV []byte
//go:embed .testdata/import/fields.csv
customFieldImportCSV []byte
//go:embed .testdata/import/types.csv
customTypesImportCSV []byte
)
func TestSheet_Read(t *testing.T) {
tests := []struct {
name string
data []byte
want []ExportTSVRow
wantErr bool
}{
{
name: "minimal import",
data: minimalImportCSV,
want: []ExportTSVRow{
{Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
{Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
{Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
},
},
{
name: "custom field import",
data: customFieldImportCSV,
want: []ExportTSVRow{
{
Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
Fields: []ExportItemFields{
{Name: "Custom Field 1", Value: "Value 1[1]"},
{Name: "Custom Field 2", Value: "Value 1[2]"},
{Name: "Custom Field 3", Value: "Value 1[3]"},
},
},
{
Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2",
Fields: []ExportItemFields{
{Name: "Custom Field 1", Value: "Value 2[1]"},
{Name: "Custom Field 2", Value: "Value 2[2]"},
{Name: "Custom Field 3", Value: "Value 2[3]"},
},
},
{
Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3",
Fields: []ExportItemFields{
{Name: "Custom Field 1", Value: "Value 3[1]"},
{Name: "Custom Field 2", Value: "Value 3[2]"},
{Name: "Custom Field 3", Value: "Value 3[3]"},
},
},
},
},
{
name: "custom types import",
data: customTypesImportCSV,
want: []ExportTSVRow{
{
Name: "Item 1",
AssetID: repo.AssetID(1),
Location: LocationString{"Path", "To", "Location 1"},
LabelStr: LabelString{"L1", "L2", "L3"},
},
{
Name: "Item 2",
AssetID: repo.AssetID(2),
Location: LocationString{"Path", "To", "Location 2"},
LabelStr: LabelString{"L1", "L2", "L3"},
},
{
Name: "Item 3",
AssetID: repo.AssetID(1000003),
Location: LocationString{"Path", "To", "Location 3"},
LabelStr: LabelString{"L1", "L2", "L3"},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
reader := bytes.NewReader(tt.data)
sheet := &IOSheet{}
err := sheet.Read(reader)
switch {
case tt.wantErr:
require.Error(t, err)
default:
require.NoError(t, err)
assert.ElementsMatch(t, tt.want, sheet.Rows)
}
})
}
}
func Test_parseHeaders(t *testing.T) {
tests := []struct {
name string
rawHeaders []string
wantHbHeaders map[string]int
wantFieldHeaders []string
wantErr bool
}{
{
name: "no hombox headers",
rawHeaders: []string{"Header 1", "Header 2", "Header 3"},
wantHbHeaders: nil,
wantFieldHeaders: nil,
wantErr: true,
},
{
name: "field headers only",
rawHeaders: []string{"HB.location", "HB.name", "HB.field.1", "HB.field.2", "HB.field.3"},
wantHbHeaders: map[string]int{
"HB.location": 0,
"HB.name": 1,
"HB.field.1": 2,
"HB.field.2": 3,
"HB.field.3": 4,
},
wantFieldHeaders: []string{"HB.field.1", "HB.field.2", "HB.field.3"},
wantErr: false,
},
{
name: "mixed headers",
rawHeaders: []string{"Header 1", "HB.name", "Header 2", "HB.field.2", "Header 3", "HB.field.3", "HB.location"},
wantHbHeaders: map[string]int{
"HB.name": 1,
"HB.field.2": 3,
"HB.field.3": 5,
"HB.location": 6,
},
wantFieldHeaders: []string{"HB.field.2", "HB.field.3"},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotHbHeaders, gotFieldHeaders, err := parseHeaders(tt.rawHeaders)
if (err != nil) != tt.wantErr {
t.Errorf("parseHeaders() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(gotHbHeaders, tt.wantHbHeaders) {
t.Errorf("parseHeaders() gotHbHeaders = %v, want %v", gotHbHeaders, tt.wantHbHeaders)
}
if !reflect.DeepEqual(gotFieldHeaders, tt.wantFieldHeaders) {
t.Errorf("parseHeaders() gotFieldHeaders = %v, want %v", gotFieldHeaders, tt.wantFieldHeaders)
}
})
}
}
func Test_determineSeparator(t *testing.T) {
type args struct {
data []byte
}
tests := []struct {
name string
args args
want rune
wantErr bool
}{
{
name: "comma",
args: args{
data: []byte("a,b,c"),
},
want: ',',
wantErr: false,
},
{
name: "tab",
args: args{
data: []byte("a\tb\tc"),
},
want: '\t',
wantErr: false,
},
{
name: "invalid",
args: args{
data: []byte("a;b;c"),
},
want: 0,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := determineSeparator(tt.args.data)
if (err != nil) != tt.wantErr {
t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
}
})
}
}


@ -0,0 +1,38 @@
package reporting
import (
"strconv"
"strings"
)
func parseSeparatedString(s string, sep string) ([]string, error) {
list := strings.Split(s, sep)
csf := make([]string, 0, len(list))
for _, s := range list {
trimmed := strings.TrimSpace(s)
if trimmed != "" {
csf = append(csf, trimmed)
}
}
return csf, nil
}
func parseFloat(s string) float64 {
if s == "" {
return 0
}
f, _ := strconv.ParseFloat(s, 64)
return f
}
func parseBool(s string) bool {
b, _ := strconv.ParseBool(s)
return b
}
func parseInt(s string) int {
i, _ := strconv.Atoi(s)
return i
}


@ -0,0 +1,65 @@
package reporting
import (
"reflect"
"testing"
)
func Test_parseSeparatedString(t *testing.T) {
type args struct {
s string
sep string
}
tests := []struct {
name string
args args
want []string
wantErr bool
}{
{
name: "comma",
args: args{
s: "a,b,c",
sep: ",",
},
want: []string{"a", "b", "c"},
wantErr: false,
},
{
name: "trimmed comma",
args: args{
s: "a, b, c",
sep: ",",
},
want: []string{"a", "b", "c"},
},
{
name: "excessive whitespace",
args: args{
s: " a, b, c ",
sep: ",",
},
want: []string{"a", "b", "c"},
},
{
name: "empty",
args: args{
s: "",
sep: ",",
},
want: []string{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := parseSeparatedString(tt.args.s, tt.args.sep)
if (err != nil) != tt.wantErr {
t.Errorf("parseSeparatedString() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("parseSeparatedString() = %v, want %v", got, tt.want)
}
})
}
}


@ -0,0 +1,81 @@
package services
import (
"context"
"strings"
"time"
"github.com/containrrr/shoutrrr"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
"github.com/rs/zerolog/log"
)
type BackgroundService struct {
repos *repo.AllRepos
}
func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error {
// Get All Groups
groups, err := svc.repos.Groups.GetAllGroups(ctx)
if err != nil {
return err
}
today := types.DateFromTime(time.Now())
for i := range groups {
group := groups[i]
entries, err := svc.repos.MaintEntry.GetScheduled(ctx, group.ID, today)
if err != nil {
return err
}
if len(entries) == 0 {
log.Debug().
Str("group_name", group.Name).
Str("group_id", group.ID.String()).
Msg("No scheduled maintenance for today")
continue
}
notifiers, err := svc.repos.Notifiers.GetByGroup(ctx, group.ID)
if err != nil {
return err
}
urls := make([]string, len(notifiers))
for i := range notifiers {
urls[i] = notifiers[i].URL
}
bldr := strings.Builder{}
bldr.WriteString("Homebox Maintenance for (")
bldr.WriteString(today.String())
bldr.WriteString("):\n")
for i := range entries {
entry := entries[i]
bldr.WriteString(" - ")
bldr.WriteString(entry.Name)
bldr.WriteString("\n")
}
var sendErrs []error
for i := range urls {
err := shoutrrr.Send(urls[i], bldr.String())
if err != nil {
sendErrs = append(sendErrs, err)
}
}
if len(sendErrs) > 0 {
return sendErrs[0]
}
}
return nil
}
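A sketch of how a caller might drive SendNotifiersToday on a daily cadence; the 24-hour ticker and the function name runDailyNotifiers are illustrative, not necessarily how the application actually schedules this.

package main

import (
	"context"
	"time"

	"github.com/hay-kot/homebox/backend/internal/core/services"
	"github.com/rs/zerolog/log"
)

// runDailyNotifiers is a hypothetical scheduler loop around the service above.
func runDailyNotifiers(ctx context.Context, svc *services.AllServices) {
	ticker := time.NewTicker(24 * time.Hour)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := svc.BackgroundService.SendNotifiersToday(ctx); err != nil {
				log.Err(err).Msg("failed to send scheduled maintenance notifications")
			}
		}
	}
}

func main() {
	// Wiring of repos/services is omitted; see the service constructor earlier in this diff.
	_ = runDailyNotifiers
}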


@ -3,10 +3,13 @@ package services
import (
"context"
"errors"
"fmt"
"io"
"strings"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/rs/zerolog/log"
)
var (
@ -18,179 +21,335 @@ type ItemService struct {
repo *repo.AllRepos
filepath string
// at is a map of tokens to attachment IDs. This is used to store the attachment ID
// for issued URLs
at attachmentTokens
autoIncrementAssetID bool
}
func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data [][]string) (int, error) {
loaded := []csvRow{}
// Skip first row
for _, row := range data[1:] {
// Skip empty rows
if len(row) == 0 {
continue
func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut, error) {
if svc.autoIncrementAssetID {
highest, err := svc.repo.Items.GetHighestAssetID(ctx, ctx.GID)
if err != nil {
return repo.ItemOut{}, err
}
if len(row) != NumOfCols {
return 0, ErrInvalidCsv
}
r := newCsvRow(row)
loaded = append(loaded, r)
item.AssetID = highest + 1
}
// validate rows
var errMap = map[int][]error{}
var hasErr bool
for i, r := range loaded {
return svc.repo.Items.Create(ctx, ctx.GID, item)
}
errs := r.validate()
if len(errs) > 0 {
hasErr = true
lineNum := i + 2
errMap[lineNum] = errs
}
}
if hasErr {
for lineNum, errs := range errMap {
for _, err := range errs {
log.Error().Err(err).Int("line", lineNum).Msg("csv import error")
}
}
}
// Bootstrap the locations and labels so we can reuse the created IDs for the items
locations := map[string]uuid.UUID{}
existingLocation, err := svc.repo.Locations.GetAll(ctx, gid)
func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) {
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID)
if err != nil {
return 0, err
}
for _, loc := range existingLocation {
locations[loc.Name] = loc.ID
}
labels := map[string]uuid.UUID{}
existingLabels, err := svc.repo.Labels.GetAll(ctx, gid)
highest, err := svc.repo.Items.GetHighestAssetID(ctx, GID)
if err != nil {
return 0, err
}
for _, label := range existingLabels {
labels[label.Name] = label.ID
}
for _, row := range loaded {
finished := 0
for _, item := range items {
highest++
// Locations
if _, exists := locations[row.Location]; !exists {
result, err := svc.repo.Locations.Create(ctx, gid, repo.LocationCreate{
Name: row.Location,
Description: "",
})
if err != nil {
return 0, err
}
locations[row.Location] = result.ID
err = svc.repo.Items.SetAssetID(ctx, GID, item.ID, highest)
if err != nil {
return 0, err
}
// Labels
finished++
}
for _, label := range row.getLabels() {
if _, exists := labels[label]; exists {
continue
}
result, err := svc.repo.Labels.Create(ctx, gid, repo.LabelCreate{
Name: label,
Description: "",
})
if err != nil {
return 0, err
}
labels[label] = result.ID
return finished, nil
}
func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) {
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID)
if err != nil {
return 0, err
}
finished := 0
for _, itemID := range ids {
ref := uuid.New().String()[0:8]
err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref})
if err != nil {
return 0, err
}
finished++
}
return finished, nil
}
func serializeLocation[T ~[]string](location T) string {
return strings.Join(location, "/")
}
// CsvImport imports items from a CSV file using the standard defined format.
//
// CsvImport applies the following rules/operations:
//
// 1. If the item does not exist, it is created.
// 2. If the item has an ImportRef and an item with that ref already exists, the existing item is updated rather than re-created.
// 3. Locations and Labels are created if they do not exist.
//
// See the usage sketch after this function.
func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) {
sheet := reporting.IOSheet{}
err := sheet.Read(data)
if err != nil {
return 0, err
}
// ========================================
// Labels
labelMap := make(map[string]uuid.UUID)
{
labels, err := svc.repo.Labels.GetAll(ctx, GID)
if err != nil {
return 0, err
}
for _, label := range labels {
labelMap[label.Name] = label.ID
}
}
// Create the items
var count int
for _, row := range loaded {
// Check Import Ref
if row.Item.ImportRef != "" {
exists, err := svc.repo.Items.CheckRef(ctx, gid, row.Item.ImportRef)
// ========================================
// Locations
locationMap := make(map[string]uuid.UUID)
{
locations, err := svc.repo.Locations.Tree(ctx, GID, repo.TreeQuery{WithItems: false})
if err != nil {
return 0, err
}
// Traverse the tree and build a map of location full paths to IDs
// where the full path is the location name joined by slashes.
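// For example (illustrative IDs), a tree Garage -> Shelf A produces:
//   locationMap["Garage"]         = <uuid of Garage>
//   locationMap["Garage/Shelf A"] = <uuid of Shelf A>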
var traverse func(location *repo.TreeItem, path []string)
traverse = func(location *repo.TreeItem, path []string) {
path = append(path, location.Name)
locationMap[serializeLocation(path)] = location.ID
for _, child := range location.Children {
traverse(child, path)
}
}
for _, location := range locations {
traverse(&location, []string{})
}
}
// ========================================
// Import items
// Asset ID Pre-Check
highestAID := repo.AssetID(-1)
if svc.autoIncrementAssetID {
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
if err != nil {
return 0, err
}
}
finished := 0
for i := range sheet.Rows {
row := sheet.Rows[i]
createRequired := true
// ========================================
// Preflight check for existing item
if row.ImportRef != "" {
exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef)
if err != nil {
return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err)
}
if exists {
continue
createRequired = false
}
}
// ========================================
// Pre-Create Labels as necessary
labelIds := make([]uuid.UUID, len(row.LabelStr))
for j := range row.LabelStr {
label := row.LabelStr[j]
id, ok := labelMap[label]
if !ok {
newLabel, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{Name: label})
if err != nil {
return 0, err
}
id = newLabel.ID
}
labelIds[j] = id
labelMap[label] = id
}
// ========================================
// Pre-Create Locations as necessary
path := serializeLocation(row.Location)
locationID, ok := locationMap[path]
if !ok { // Walk each prefix of row.Location and create any path element that does not exist yet.
paths := []string{}
for i, pathElement := range row.Location {
paths = append(paths, pathElement)
path := serializeLocation(paths)
locationID, ok = locationMap[path]
if !ok {
parentID := uuid.Nil
// Get the parent ID
if i > 0 {
parentPath := serializeLocation(row.Location[:i])
parentID = locationMap[parentPath]
}
newLocation, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
ParentID: parentID,
Name: pathElement,
})
if err != nil {
return 0, err
}
locationID = newLocation.ID
}
locationMap[path] = locationID
}
locationID, ok = locationMap[path]
if !ok {
return 0, errors.New("failed to create location")
}
}
var effAID repo.AssetID
if svc.autoIncrementAssetID && row.AssetID.Nil() {
effAID = highestAID + 1
highestAID++
} else {
effAID = row.AssetID
}
// ========================================
// Create Item
var item repo.ItemOut
switch {
case createRequired:
newItem := repo.ItemCreate{
ImportRef: row.ImportRef,
Name: row.Name,
Description: row.Description,
AssetID: effAID,
LocationID: locationID,
LabelIDs: labelIds,
}
item, err = svc.repo.Items.Create(ctx, GID, newItem)
if err != nil {
log.Err(err).Msg("error checking import ref")
return 0, err
}
default:
item, err = svc.repo.Items.GetByRef(ctx, GID, row.ImportRef)
if err != nil {
return 0, err
}
}
locationID := locations[row.Location]
labelIDs := []uuid.UUID{}
for _, label := range row.getLabels() {
labelIDs = append(labelIDs, labels[label])
if item.ID == uuid.Nil {
panic("item ID is nil on import - this should never happen")
}
log.Info().
Str("name", row.Item.Name).
Str("location", row.Location).
Msgf("Creating Item: %s", row.Item.Name)
result, err := svc.repo.Items.Create(ctx, gid, repo.ItemCreate{
ImportRef: row.Item.ImportRef,
Name: row.Item.Name,
Description: row.Item.Description,
LabelIDs: labelIDs,
LocationID: locationID,
})
if err != nil {
return count, err
fields := make([]repo.ItemField, len(row.Fields))
for i := range row.Fields {
fields[i] = repo.ItemField{
Name: row.Fields[i].Name,
Type: "text",
TextValue: row.Fields[i].Value,
}
}
// Update the item with the rest of the data
_, err = svc.repo.Items.UpdateByGroup(ctx, gid, repo.ItemUpdate{
// Edges
updateItem := repo.ItemUpdate{
ID: item.ID,
LabelIDs: labelIds,
LocationID: locationID,
LabelIDs: labelIDs,
// General Fields
ID: result.ID,
Name: result.Name,
Description: result.Description,
Insured: row.Item.Insured,
Notes: row.Item.Notes,
Quantity: row.Item.Quantity,
Name: row.Name,
Description: row.Description,
AssetID: effAID,
Insured: row.Insured,
Quantity: row.Quantity,
Archived: row.Archived,
// Identifies the item as imported
SerialNumber: row.Item.SerialNumber,
ModelNumber: row.Item.ModelNumber,
Manufacturer: row.Item.Manufacturer,
PurchasePrice: row.PurchasePrice,
PurchaseFrom: row.PurchaseFrom,
PurchaseTime: row.PurchaseTime,
// Purchase
PurchaseFrom: row.Item.PurchaseFrom,
PurchasePrice: row.Item.PurchasePrice,
PurchaseTime: row.Item.PurchaseTime,
Manufacturer: row.Manufacturer,
ModelNumber: row.ModelNumber,
SerialNumber: row.SerialNumber,
// Warranty
LifetimeWarranty: row.Item.LifetimeWarranty,
WarrantyExpires: row.Item.WarrantyExpires,
WarrantyDetails: row.Item.WarrantyDetails,
LifetimeWarranty: row.LifetimeWarranty,
WarrantyExpires: row.WarrantyExpires,
WarrantyDetails: row.WarrantyDetails,
SoldTo: row.Item.SoldTo,
SoldPrice: row.Item.SoldPrice,
SoldTime: row.Item.SoldTime,
SoldNotes: row.Item.SoldNotes,
})
SoldTo: row.SoldTo,
SoldTime: row.SoldTime,
SoldPrice: row.SoldPrice,
SoldNotes: row.SoldNotes,
if err != nil {
return count, err
Notes: row.Notes,
Fields: fields,
}
count++
item, err = svc.repo.Items.UpdateByGroup(ctx, GID, updateItem)
if err != nil {
return 0, err
}
finished++
}
return count, nil
return finished, nil
}
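A minimal usage sketch of the reader-based CsvImport, written as if inside the services package; svc, gid, and the data.csv path are assumptions rather than names taken from this diff, and the imports (os, context, uuid, zerolog) are assumed to be available:

// importSketch is illustrative only; constructing the ItemService is elided.
func importSketch(svc *ItemService, gid uuid.UUID) error {
	f, err := os.Open("data.csv") // hypothetical file path
	if err != nil {
		return err
	}
	defer f.Close()

	n, err := svc.CsvImport(context.Background(), gid, f)
	if err != nil {
		return err
	}
	log.Info().Int("count", n).Msg("rows imported or updated")
	return nil
}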
func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
items, err := svc.repo.Items.GetAll(ctx, GID)
if err != nil {
return nil, err
}
sheet := reporting.IOSheet{}
err = sheet.ReadItems(ctx, items, GID, svc.repo)
if err != nil {
return nil, err
}
return sheet.TSV()
}
func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
items, err := svc.repo.Items.GetAll(ctx, GID)
if err != nil {
return nil, err
}
return reporting.BillOfMaterialsTSV(items)
}

View file

@ -4,72 +4,16 @@ import (
"context"
"io"
"os"
"time"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/hasher"
"github.com/rs/zerolog/log"
)
// TODO: this isn't a scalable solution, tokens should be stored in the database
type attachmentTokens map[string]uuid.UUID
func (at attachmentTokens) Add(token string, id uuid.UUID) {
at[token] = id
log.Debug().Str("token", token).Str("uuid", id.String()).Msg("added token")
go func() {
ch := time.After(1 * time.Minute)
<-ch
at.Delete(token)
log.Debug().Str("token", token).Msg("deleted token")
}()
}
func (at attachmentTokens) Get(token string) (uuid.UUID, bool) {
id, ok := at[token]
return id, ok
}
func (at attachmentTokens) Delete(token string) {
delete(at, token)
}
func (svc *ItemService) AttachmentToken(ctx Context, itemId, attachmentId uuid.UUID) (string, error) {
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
if err != nil {
return "", err
}
token := hasher.GenerateToken()
// Ensure that the file exists
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
if err != nil {
return "", err
}
if _, err := os.Stat(attachment.Edges.Document.Path); os.IsNotExist(err) {
_ = svc.AttachmentDelete(ctx, ctx.GID, itemId, attachmentId)
return "", ErrNotFound
}
svc.at.Add(token.Raw, attachmentId)
return token.Raw, nil
}
func (svc *ItemService) AttachmentPath(ctx context.Context, token string) (*ent.Document, error) {
attachmentId, ok := svc.at.Get(token)
if !ok {
return nil, ErrNotFound
}
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UUID) (*ent.Document, error) {
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID)
if err != nil {
return nil, err
}
@ -77,9 +21,9 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, token string) (*ent.
return attachment.Edges.Document, nil
}
func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
// Update Attachment
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, attachment.Type(data.Type))
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, data)
if err != nil {
return repo.ItemOut{}, err
}
@ -91,15 +35,15 @@ func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *re
return repo.ItemOut{}, err
}
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
}
// AttachmentAdd adds an attachment to an item by creating an entry in the Documents table and linking it to the
// Attachments and Items tables. The file provided via the reader is stored on the file system based on the
// relative path provided during construction of the service.
func (svc *ItemService) AttachmentAdd(ctx Context, itemId uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
// Get the Item
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
if err != nil {
return repo.ItemOut{}, err
}
@ -112,29 +56,29 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemId uuid.UUID, filename st
}
// Create the attachment
_, err = svc.repo.Attachments.Create(ctx, itemId, doc.ID, attachmentType)
_, err = svc.repo.Attachments.Create(ctx, itemID, doc.ID, attachmentType)
if err != nil {
log.Err(err).Msg("failed to create attachment")
return repo.ItemOut{}, err
}
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
}
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemId, attachmentId uuid.UUID) error {
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemID, attachmentID uuid.UUID) error {
// Get the Item
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemId)
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemID)
if err != nil {
return err
}
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID)
if err != nil {
return err
}
// Delete the attachment
err = svc.repo.Attachments.Delete(ctx, attachmentId)
err = svc.repo.Attachments.Delete(ctx, attachmentID)
if err != nil {
return err
}

View file

@ -9,6 +9,7 @@ import (
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestItemService_AddAttachment(t *testing.T) {
@ -23,7 +24,7 @@ func TestItemService_AddAttachment(t *testing.T) {
Description: "test",
Name: "test",
})
assert.NoError(t, err)
require.NoError(t, err)
assert.NotNil(t, loc)
itmC := repo.ItemCreate{
@ -33,11 +34,11 @@ func TestItemService_AddAttachment(t *testing.T) {
}
itm, err := svc.repo.Items.Create(context.Background(), tGroup.ID, itmC)
assert.NoError(t, err)
require.NoError(t, err)
assert.NotNil(t, itm)
t.Cleanup(func() {
err := svc.repo.Items.Delete(context.Background(), itm.ID)
assert.NoError(t, err)
require.NoError(t, err)
})
contents := fk.Str(1000)
@ -45,7 +46,7 @@ func TestItemService_AddAttachment(t *testing.T) {
// Setup
afterAttachment, err := svc.AttachmentAdd(tCtx, itm.ID, "testfile.txt", "attachment", reader)
assert.NoError(t, err)
require.NoError(t, err)
assert.NotNil(t, afterAttachment)
// Check that the file exists
@ -56,7 +57,6 @@ func TestItemService_AddAttachment(t *testing.T) {
// Check that the file contents are correct
bts, err := os.ReadFile(storedPath)
assert.NoError(t, err)
require.NoError(t, err)
assert.Equal(t, contents, string(bts))
}

View file

@ -1,118 +0,0 @@
package services
import (
"errors"
"strconv"
"strings"
"time"
"github.com/hay-kot/homebox/backend/internal/data/repo"
)
var ErrInvalidCsv = errors.New("invalid csv")
const NumOfCols = 21
func parseFloat(s string) float64 {
if s == "" {
return 0
}
f, _ := strconv.ParseFloat(s, 64)
return f
}
func parseDate(s string) time.Time {
if s == "" {
return time.Time{}
}
p, _ := time.Parse("01/02/2006", s)
return p
}
func parseBool(s string) bool {
switch strings.ToLower(s) {
case "true", "yes", "1":
return true
default:
return false
}
}
func parseInt(s string) int {
i, _ := strconv.Atoi(s)
return i
}
type csvRow struct {
Item repo.ItemOut
Location string
LabelStr string
}
func newCsvRow(row []string) csvRow {
return csvRow{
Location: row[1],
LabelStr: row[2],
Item: repo.ItemOut{
ItemSummary: repo.ItemSummary{
ImportRef: row[0],
Quantity: parseInt(row[3]),
Name: row[4],
Description: row[5],
Insured: parseBool(row[6]),
},
SerialNumber: row[7],
ModelNumber: row[8],
Manufacturer: row[9],
Notes: row[10],
PurchaseFrom: row[11],
PurchasePrice: parseFloat(row[12]),
PurchaseTime: parseDate(row[13]),
LifetimeWarranty: parseBool(row[14]),
WarrantyExpires: parseDate(row[15]),
WarrantyDetails: row[16],
SoldTo: row[17],
SoldPrice: parseFloat(row[18]),
SoldTime: parseDate(row[19]),
SoldNotes: row[20],
},
}
}
func (c csvRow) getLabels() []string {
split := strings.Split(c.LabelStr, ";")
// Trim each
for i, s := range split {
split[i] = strings.TrimSpace(s)
}
// Remove empty
for i, s := range split {
if s == "" {
split = append(split[:i], split[i+1:]...)
}
}
return split
}
func (c csvRow) validate() []error {
var errs []error
add := func(err error) {
errs = append(errs, err)
}
required := func(s string, name string) {
if s == "" {
add(errors.New(name + " is required"))
}
}
required(c.Location, "Location")
required(c.Item.Name, "Name")
return errs
}

View file

@ -1,117 +0,0 @@
package services
import (
"bytes"
"encoding/csv"
"fmt"
"reflect"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
const CSV_DATA = `
Import Ref,Location,Labels,Quantity,Name,Description,Insured,Serial Number,Mode Number,Manufacturer,Notes,Purchase From,Purchased Price,Purchased Time,Lifetime Warranty,Warranty Expires,Warranty Details,Sold To,Sold Price,Sold Time,Sold Notes
A,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,Description 1,TRUE,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,10/13/2021,,,,10/13/2021,
B,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,Description 2,FALSE,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,10/15/2021,,,,10/15/2021,
C,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,Description 3,TRUE,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,10/13/2021,,,,10/13/2021,
D,Downstairs,IOT;Home Assistant; Z-Wave,1,Ecolink Z-Wave PIR Motion Sensor,Description 4,FALSE,,PIRZWAVE2.5-ECO,Ecolink,,Amazon,35.58,10/21/2020,,10/21/2020,,,,10/21/2020,
E,Entry,IOT;Home Assistant; Z-Wave,1,Yale Security Touchscreen Deadbolt,Description 5,TRUE,,YRD226ZW2619,Yale,,Amazon,120.39,10/14/2020,,10/14/2020,,,,10/14/2020,
F,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,Description 6,FALSE,,39351,Honeywell,,Amazon,65.98,09/30/2020,,09/30/2020,,,,09/30/2020,`
func loadcsv() [][]string {
reader := csv.NewReader(bytes.NewBuffer([]byte(CSV_DATA)))
records, err := reader.ReadAll()
if err != nil {
panic(err)
}
return records
}
func Test_CorrectDateParsing(t *testing.T) {
t.Parallel()
expected := []time.Time{
time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
time.Date(2021, 10, 15, 0, 0, 0, 0, time.UTC),
time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
time.Date(2020, 10, 21, 0, 0, 0, 0, time.UTC),
time.Date(2020, 10, 14, 0, 0, 0, 0, time.UTC),
time.Date(2020, 9, 30, 0, 0, 0, 0, time.UTC),
}
records := loadcsv()
for i, record := range records {
if i == 0 {
continue
}
entity := newCsvRow(record)
expected := expected[i-1]
assert.Equal(t, expected, entity.Item.PurchaseTime, fmt.Sprintf("Failed on row %d", i))
assert.Equal(t, expected, entity.Item.WarrantyExpires, fmt.Sprintf("Failed on row %d", i))
assert.Equal(t, expected, entity.Item.SoldTime, fmt.Sprintf("Failed on row %d", i))
}
}
func Test_csvRow_getLabels(t *testing.T) {
type fields struct {
LabelStr string
}
tests := []struct {
name string
fields fields
want []string
}{
{
name: "basic test",
fields: fields{
LabelStr: "IOT;Home Assistant;Z-Wave",
},
want: []string{"IOT", "Home Assistant", "Z-Wave"},
},
{
name: "no labels",
fields: fields{
LabelStr: "",
},
want: []string{},
},
{
name: "single label",
fields: fields{
LabelStr: "IOT",
},
want: []string{"IOT"},
},
{
name: "trailing semicolon",
fields: fields{
LabelStr: "IOT;",
},
want: []string{"IOT"},
},
{
name: "whitespace",
fields: fields{
LabelStr: " IOT; Home Assistant; Z-Wave ",
},
want: []string{"IOT", "Home Assistant", "Z-Wave"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := csvRow{
LabelStr: tt.fields.LabelStr,
}
if got := c.getLabels(); !reflect.DeepEqual(got, tt.want) {
t.Errorf("csvRow.getLabels() = %v, want %v", got, tt.want)
}
})
}
}

View file

@ -1,77 +0,0 @@
package services
import (
"context"
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
)
func TestItemService_CsvImport(t *testing.T) {
data := loadcsv()
svc := &ItemService{
repo: tRepos,
}
count, err := svc.CsvImport(context.Background(), tGroup.ID, data)
assert.Equal(t, 6, count)
assert.NoError(t, err)
// Check import refs are deduplicated
count, err = svc.CsvImport(context.Background(), tGroup.ID, data)
assert.Equal(t, 0, count)
assert.NoError(t, err)
items, err := svc.repo.Items.GetAll(context.Background(), tGroup.ID)
assert.NoError(t, err)
t.Cleanup(func() {
for _, item := range items {
err := svc.repo.Items.Delete(context.Background(), item.ID)
assert.NoError(t, err)
}
})
assert.Equal(t, len(items), 6)
dataCsv := []csvRow{}
for _, item := range data {
dataCsv = append(dataCsv, newCsvRow(item))
}
allLocation, err := tRepos.Locations.GetAll(context.Background(), tGroup.ID)
assert.NoError(t, err)
locNames := []string{}
for _, loc := range allLocation {
locNames = append(locNames, loc.Name)
}
allLabels, err := tRepos.Labels.GetAll(context.Background(), tGroup.ID)
assert.NoError(t, err)
labelNames := []string{}
for _, label := range allLabels {
labelNames = append(labelNames, label.Name)
}
ids := []uuid.UUID{}
t.Cleanup((func() {
for _, id := range ids {
err := svc.repo.Items.Delete(context.Background(), id)
assert.NoError(t, err)
}
}))
for _, item := range items {
assert.Contains(t, locNames, item.Location.Name)
for _, label := range item.Labels {
assert.Contains(t, labelNames, label.Name)
}
for _, csvRow := range dataCsv {
if csvRow.Item.Name == item.Name {
assert.Equal(t, csvRow.Item.Description, item.Description)
assert.Equal(t, csvRow.Item.Quantity, item.Quantity)
assert.Equal(t, csvRow.Item.Insured, item.Insured)
}
}
}
}

View file

@ -6,6 +6,7 @@ import (
"time"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/hasher"
"github.com/rs/zerolog/log"
@ -15,7 +16,7 @@ var (
oneWeek = time.Hour * 24 * 7
ErrorInvalidLogin = errors.New("invalid username or password")
ErrorInvalidToken = errors.New("invalid token")
ErrorTokenIdMismatch = errors.New("token id mismatch")
ErrorTokenIDMismatch = errors.New("token id mismatch")
)
type UserService struct {
@ -30,8 +31,9 @@ type (
Password string `json:"password"`
}
UserAuthTokenDetail struct {
Raw string `json:"raw"`
ExpiresAt time.Time `json:"expiresAt"`
Raw string `json:"raw"`
AttachmentToken string `json:"attachmentToken"`
ExpiresAt time.Time `json:"expiresAt"`
}
LoginForm struct {
Username string `json:"username"`
@ -49,21 +51,25 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
Msg("Registering new user")
var (
err error
group repo.Group
token repo.GroupInvitation
isOwner = false
err error
group repo.Group
token repo.GroupInvitation
// creatingGroup is true if the user is creating a new group.
creatingGroup = false
)
switch data.GroupToken {
case "":
isOwner = true
log.Debug().Msg("creating new group")
creatingGroup = true
group, err = svc.repos.Groups.GroupCreate(ctx, "Home")
if err != nil {
log.Err(err).Msg("Failed to create group")
return repo.UserOut{}, err
}
default:
log.Debug().Msg("joining existing group")
token, err = svc.repos.Groups.InvitationGet(ctx, hasher.HashToken(data.GroupToken))
if err != nil {
log.Err(err).Msg("Failed to get invitation token")
@ -79,30 +85,37 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
Password: hashed,
IsSuperuser: false,
GroupID: group.ID,
IsOwner: isOwner,
IsOwner: creatingGroup,
}
usr, err := svc.repos.Users.Create(ctx, usrCreate)
if err != nil {
return repo.UserOut{}, err
}
log.Debug().Msg("user created")
for _, label := range defaultLabels() {
_, err := svc.repos.Labels.Create(ctx, group.ID, label)
if err != nil {
return repo.UserOut{}, err
// Create the default labels and locations for the group.
if creatingGroup {
log.Debug().Msg("creating default labels")
for _, label := range defaultLabels() {
_, err := svc.repos.Labels.Create(ctx, usr.GroupID, label)
if err != nil {
return repo.UserOut{}, err
}
}
log.Debug().Msg("creating default locations")
for _, location := range defaultLocations() {
_, err := svc.repos.Locations.Create(ctx, usr.GroupID, location)
if err != nil {
return repo.UserOut{}, err
}
}
}
for _, location := range defaultLocations() {
_, err := svc.repos.Locations.Create(ctx, group.ID, location)
if err != nil {
return repo.UserOut{}, err
}
}
// Decrement the invitation token if it was used
// Decrement the invitation token if it was used.
if token.ID != uuid.Nil {
log.Debug().Msg("decrementing invitation token")
err = svc.repos.Groups.InvitationUpdate(ctx, token.ID, token.Uses-1)
if err != nil {
log.Err(err).Msg("Failed to update invitation token")
@ -125,27 +138,52 @@ func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data repo.
return repo.UserOut{}, err
}
return svc.repos.Users.GetOneId(ctx, ID)
return svc.repos.Users.GetOneID(ctx, ID)
}
// ============================================================================
// User Authentication
func (svc *UserService) createToken(ctx context.Context, userId uuid.UUID) (UserAuthTokenDetail, error) {
newToken := hasher.GenerateToken()
func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) {
attachmentToken := hasher.GenerateToken()
created, err := svc.repos.AuthTokens.CreateToken(ctx, repo.UserAuthTokenCreate{
UserID: userId,
TokenHash: newToken.Hash,
ExpiresAt: time.Now().Add(oneWeek),
})
expiresAt := time.Now().Add(oneWeek)
if extendedSession {
expiresAt = time.Now().Add(oneWeek * 4)
}
return UserAuthTokenDetail{Raw: newToken.Raw, ExpiresAt: created.ExpiresAt}, err
attachmentData := repo.UserAuthTokenCreate{
UserID: userID,
TokenHash: attachmentToken.Hash,
ExpiresAt: expiresAt,
}
_, err := svc.repos.AuthTokens.CreateToken(ctx, attachmentData, authroles.RoleAttachments)
if err != nil {
return UserAuthTokenDetail{}, err
}
userToken := hasher.GenerateToken()
data := repo.UserAuthTokenCreate{
UserID: userID,
TokenHash: userToken.Hash,
ExpiresAt: expiresAt,
}
created, err := svc.repos.AuthTokens.CreateToken(ctx, data, authroles.RoleUser)
if err != nil {
return UserAuthTokenDetail{}, err
}
return UserAuthTokenDetail{
Raw: userToken.Raw,
ExpiresAt: created.ExpiresAt,
AttachmentToken: attachmentToken.Raw,
}, nil
}
func (svc *UserService) Login(ctx context.Context, username, password string) (UserAuthTokenDetail, error) {
func (svc *UserService) Login(ctx context.Context, username, password string, extendedSession bool) (UserAuthTokenDetail, error) {
usr, err := svc.repos.Users.GetOneEmail(ctx, username)
if err != nil {
// SECURITY: Perform hash to ensure response times are the same
hasher.CheckPasswordHash("not-a-real-password", "not-a-real-password")
@ -156,7 +194,7 @@ func (svc *UserService) Login(ctx context.Context, username, password string) (U
return UserAuthTokenDetail{}, ErrorInvalidLogin
}
return svc.createToken(ctx, usr.ID)
return svc.createSessionToken(ctx, usr.ID, extendedSession)
}
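A hedged sketch of the updated Login call with the new extendedSession flag, written as if inside the services package; the credentials are placeholders and the UserService wiring is elided:

func loginSketch(ctx context.Context, svc *UserService) error {
	// true asks for the extended (roughly four-week) session created above
	detail, err := svc.Login(ctx, "user@example.com", "hunter2", true)
	if err != nil {
		return err
	}
	// detail.Raw is the session token; detail.AttachmentToken is the separate
	// token issued with the attachments role for serving attachment URLs.
	log.Debug().Time("expires_at", detail.ExpiresAt).Msg("logged in")
	_ = detail.AttachmentToken
	return nil
}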
func (svc *UserService) Logout(ctx context.Context, token string) error {
@ -169,14 +207,11 @@ func (svc *UserService) RenewToken(ctx context.Context, token string) (UserAuthT
hash := hasher.HashToken(token)
dbToken, err := svc.repos.AuthTokens.GetUserFromToken(ctx, hash)
if err != nil {
return UserAuthTokenDetail{}, ErrorInvalidToken
}
newToken, _ := svc.createToken(ctx, dbToken.ID)
return newToken, nil
return svc.createSessionToken(ctx, dbToken.ID, false)
}
// DeleteSelf deletes the currently logged-in user, based on the provided UUID
@ -187,7 +222,7 @@ func (svc *UserService) DeleteSelf(ctx context.Context, ID uuid.UUID) error {
}
func (svc *UserService) ChangePassword(ctx Context, current string, new string) (ok bool) {
usr, err := svc.repos.Users.GetOneId(ctx, ctx.UID)
usr, err := svc.repos.Users.GetOneID(ctx, ctx.UID)
if err != nil {
return false
}

View file

@ -7,6 +7,7 @@ import (
"strings"
"time"
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
@ -25,11 +26,14 @@ type Attachment struct {
UpdatedAt time.Time `json:"updated_at,omitempty"`
// Type holds the value of the "type" field.
Type attachment.Type `json:"type,omitempty"`
// Primary holds the value of the "primary" field.
Primary bool `json:"primary,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the AttachmentQuery when eager-loading is set.
Edges AttachmentEdges `json:"edges"`
document_attachments *uuid.UUID
item_attachments *uuid.UUID
selectValues sql.SelectValues
}
// AttachmentEdges holds the relations/edges for other nodes in the graph.
@ -74,6 +78,8 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case attachment.FieldPrimary:
values[i] = new(sql.NullBool)
case attachment.FieldType:
values[i] = new(sql.NullString)
case attachment.FieldCreatedAt, attachment.FieldUpdatedAt:
@ -85,7 +91,7 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
case attachment.ForeignKeys[1]: // item_attachments
values[i] = &sql.NullScanner{S: new(uuid.UUID)}
default:
return nil, fmt.Errorf("unexpected column %q for type Attachment", columns[i])
values[i] = new(sql.UnknownType)
}
}
return values, nil
@ -123,6 +129,12 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
} else if value.Valid {
a.Type = attachment.Type(value.String)
}
case attachment.FieldPrimary:
if value, ok := values[i].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field primary", values[i])
} else if value.Valid {
a.Primary = value.Bool
}
case attachment.ForeignKeys[0]:
if value, ok := values[i].(*sql.NullScanner); !ok {
return fmt.Errorf("unexpected type %T for field document_attachments", values[i])
@ -137,26 +149,34 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
a.item_attachments = new(uuid.UUID)
*a.item_attachments = *value.S.(*uuid.UUID)
}
default:
a.selectValues.Set(columns[i], values[i])
}
}
return nil
}
// Value returns the ent.Value that was dynamically selected and assigned to the Attachment.
// This includes values selected through modifiers, order, etc.
func (a *Attachment) Value(name string) (ent.Value, error) {
return a.selectValues.Get(name)
}
// QueryItem queries the "item" edge of the Attachment entity.
func (a *Attachment) QueryItem() *ItemQuery {
return (&AttachmentClient{config: a.config}).QueryItem(a)
return NewAttachmentClient(a.config).QueryItem(a)
}
// QueryDocument queries the "document" edge of the Attachment entity.
func (a *Attachment) QueryDocument() *DocumentQuery {
return (&AttachmentClient{config: a.config}).QueryDocument(a)
return NewAttachmentClient(a.config).QueryDocument(a)
}
// Update returns a builder for updating this Attachment.
// Note that you need to call Attachment.Unwrap() before calling this method if this Attachment
// was returned from a transaction, and the transaction was committed or rolled back.
func (a *Attachment) Update() *AttachmentUpdateOne {
return (&AttachmentClient{config: a.config}).UpdateOne(a)
return NewAttachmentClient(a.config).UpdateOne(a)
}
// Unwrap unwraps the Attachment entity that was returned from a transaction after it was closed,
@ -183,15 +203,12 @@ func (a *Attachment) String() string {
builder.WriteString(", ")
builder.WriteString("type=")
builder.WriteString(fmt.Sprintf("%v", a.Type))
builder.WriteString(", ")
builder.WriteString("primary=")
builder.WriteString(fmt.Sprintf("%v", a.Primary))
builder.WriteByte(')')
return builder.String()
}
// Attachments is a parsable slice of Attachment.
type Attachments []*Attachment
func (a Attachments) config(cfg config) {
for _i := range a {
a[_i].config = cfg
}
}

View file

@ -6,6 +6,8 @@ import (
"fmt"
"time"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
)
@ -20,6 +22,8 @@ const (
FieldUpdatedAt = "updated_at"
// FieldType holds the string denoting the type field in the database.
FieldType = "type"
// FieldPrimary holds the string denoting the primary field in the database.
FieldPrimary = "primary"
// EdgeItem holds the string denoting the item edge name in mutations.
EdgeItem = "item"
// EdgeDocument holds the string denoting the document edge name in mutations.
@ -48,6 +52,7 @@ var Columns = []string{
FieldCreatedAt,
FieldUpdatedAt,
FieldType,
FieldPrimary,
}
// ForeignKeys holds the SQL foreign-keys that are owned by the "attachments"
@ -79,6 +84,8 @@ var (
DefaultUpdatedAt func() time.Time
// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
UpdateDefaultUpdatedAt func() time.Time
// DefaultPrimary holds the default value on creation for the "primary" field.
DefaultPrimary bool
// DefaultID holds the default value on creation for the "id" field.
DefaultID func() uuid.UUID
)
@ -111,3 +118,59 @@ func TypeValidator(_type Type) error {
return fmt.Errorf("attachment: invalid enum value for type field: %q", _type)
}
}
// OrderOption defines the ordering options for the Attachment queries.
type OrderOption func(*sql.Selector)
// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldID, opts...).ToFunc()
}
// ByCreatedAt orders the results by the created_at field.
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
}
// ByUpdatedAt orders the results by the updated_at field.
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
}
// ByType orders the results by the type field.
func ByType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldType, opts...).ToFunc()
}
// ByPrimary orders the results by the primary field.
func ByPrimary(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPrimary, opts...).ToFunc()
}
// ByItemField orders the results by item field.
func ByItemField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {
sqlgraph.OrderByNeighborTerms(s, newItemStep(), sql.OrderByField(field, opts...))
}
}
// ByDocumentField orders the results by document field.
func ByDocumentField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {
sqlgraph.OrderByNeighborTerms(s, newDocumentStep(), sql.OrderByField(field, opts...))
}
}
func newItemStep() *sqlgraph.Step {
return sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(ItemInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
)
}
func newDocumentStep() *sqlgraph.Step {
return sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(DocumentInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
)
}
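A hedged sketch of the new typed ordering options, assuming the generated ent client and attachment packages from this repository and the order terms from entgo.io/ent/dialect/sql:

// listAttachments is illustrative: newest attachments first, primary ones ahead of the rest.
func listAttachments(ctx context.Context, client *ent.Client) ([]*ent.Attachment, error) {
	return client.Attachment.Query().
		Order(
			attachment.ByPrimary(sql.OrderDesc()),
			attachment.ByCreatedAt(sql.OrderDesc()),
		).
		All(ctx)
}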

View file

@ -13,251 +13,172 @@ import (
// ID filters vertices based on their ID field.
func ID(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldEQ(FieldID, id))
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldEQ(FieldID, id))
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldNEQ(FieldID, id))
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
v := make([]any, len(ids))
for i := range v {
v[i] = ids[i]
}
s.Where(sql.In(s.C(FieldID), v...))
})
return predicate.Attachment(sql.FieldIn(FieldID, ids...))
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
v := make([]any, len(ids))
for i := range v {
v[i] = ids[i]
}
s.Where(sql.NotIn(s.C(FieldID), v...))
})
return predicate.Attachment(sql.FieldNotIn(FieldID, ids...))
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldGT(FieldID, id))
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldGTE(FieldID, id))
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldLT(FieldID, id))
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id uuid.UUID) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldID), id))
})
return predicate.Attachment(sql.FieldLTE(FieldID, id))
}
// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldEQ(FieldCreatedAt, v))
}
// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldEQ(FieldUpdatedAt, v))
}
// Primary applies equality check predicate on the "primary" field. It's identical to PrimaryEQ.
func Primary(v bool) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
}
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldEQ(FieldCreatedAt, v))
}
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldNEQ(FieldCreatedAt, v))
}
// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.Attachment {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldCreatedAt), v...))
})
return predicate.Attachment(sql.FieldIn(FieldCreatedAt, vs...))
}
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.Attachment {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
})
return predicate.Attachment(sql.FieldNotIn(FieldCreatedAt, vs...))
}
// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldGT(FieldCreatedAt, v))
}
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldGTE(FieldCreatedAt, v))
}
// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldLT(FieldCreatedAt, v))
}
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldCreatedAt), v))
})
return predicate.Attachment(sql.FieldLTE(FieldCreatedAt, v))
}
// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldEQ(FieldUpdatedAt, v))
}
// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldNEQ(FieldUpdatedAt, v))
}
// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.Attachment {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldUpdatedAt), v...))
})
return predicate.Attachment(sql.FieldIn(FieldUpdatedAt, vs...))
}
// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.Attachment {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
})
return predicate.Attachment(sql.FieldNotIn(FieldUpdatedAt, vs...))
}
// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldGT(FieldUpdatedAt, v))
}
// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldGTE(FieldUpdatedAt, v))
}
// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldLT(FieldUpdatedAt, v))
}
// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
})
return predicate.Attachment(sql.FieldLTE(FieldUpdatedAt, v))
}
// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v Type) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldType), v))
})
return predicate.Attachment(sql.FieldEQ(FieldType, v))
}
// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v Type) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldType), v))
})
return predicate.Attachment(sql.FieldNEQ(FieldType, v))
}
// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...Type) predicate.Attachment {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldType), v...))
})
return predicate.Attachment(sql.FieldIn(FieldType, vs...))
}
// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...Type) predicate.Attachment {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Attachment(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldType), v...))
})
return predicate.Attachment(sql.FieldNotIn(FieldType, vs...))
}
// PrimaryEQ applies the EQ predicate on the "primary" field.
func PrimaryEQ(v bool) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
}
// PrimaryNEQ applies the NEQ predicate on the "primary" field.
func PrimaryNEQ(v bool) predicate.Attachment {
return predicate.Attachment(sql.FieldNEQ(FieldPrimary, v))
}
// HasItem applies the HasEdge predicate on the "item" edge.
@ -265,7 +186,6 @@ func HasItem() predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(ItemTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
)
sqlgraph.HasNeighbors(s, step)
@ -275,11 +195,7 @@ func HasItem() predicate.Attachment {
// HasItemWith applies the HasEdge predicate on the "item" edge with a given conditions (other predicates).
func HasItemWith(preds ...predicate.Item) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(ItemInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
)
step := newItemStep()
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
@ -293,7 +209,6 @@ func HasDocument() predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(DocumentTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
)
sqlgraph.HasNeighbors(s, step)
@ -303,11 +218,7 @@ func HasDocument() predicate.Attachment {
// HasDocumentWith applies the HasEdge predicate on the "document" edge with a given conditions (other predicates).
func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(DocumentInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
)
step := newDocumentStep()
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
@ -318,32 +229,15 @@ func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Attachment) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Attachment(sql.AndPredicates(predicates...))
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Attachment) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Attachment(sql.OrPredicates(predicates...))
}
// Not applies the not operator on the given predicate.
func Not(p predicate.Attachment) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Attachment(sql.NotPredicates(p))
}
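A brief, hedged sketch of composing the helper-based predicates above; the cutoff value and function name are made up, while the predicates themselves are the ones defined in this file:

// recentPrimary matches primary attachments created after the cutoff.
func recentPrimary(cutoff time.Time) predicate.Attachment {
	return attachment.And(
		attachment.Primary(true),
		attachment.CreatedAtGT(cutoff),
	)
}

// e.g. client.Attachment.Query().Where(recentPrimary(time.Now().AddDate(0, 0, -7))).All(ctx)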

View file

@ -65,6 +65,20 @@ func (ac *AttachmentCreate) SetNillableType(a *attachment.Type) *AttachmentCreat
return ac
}
// SetPrimary sets the "primary" field.
func (ac *AttachmentCreate) SetPrimary(b bool) *AttachmentCreate {
ac.mutation.SetPrimary(b)
return ac
}
// SetNillablePrimary sets the "primary" field if the given value is not nil.
func (ac *AttachmentCreate) SetNillablePrimary(b *bool) *AttachmentCreate {
if b != nil {
ac.SetPrimary(*b)
}
return ac
}
// SetID sets the "id" field.
func (ac *AttachmentCreate) SetID(u uuid.UUID) *AttachmentCreate {
ac.mutation.SetID(u)
@ -108,50 +122,8 @@ func (ac *AttachmentCreate) Mutation() *AttachmentMutation {
// Save creates the Attachment in the database.
func (ac *AttachmentCreate) Save(ctx context.Context) (*Attachment, error) {
var (
err error
node *Attachment
)
ac.defaults()
if len(ac.hooks) == 0 {
if err = ac.check(); err != nil {
return nil, err
}
node, err = ac.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AttachmentMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err = ac.check(); err != nil {
return nil, err
}
ac.mutation = mutation
if node, err = ac.sqlSave(ctx); err != nil {
return nil, err
}
mutation.id = &node.ID
mutation.done = true
return node, err
})
for i := len(ac.hooks) - 1; i >= 0; i-- {
if ac.hooks[i] == nil {
return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
}
mut = ac.hooks[i](mut)
}
v, err := mut.Mutate(ctx, ac.mutation)
if err != nil {
return nil, err
}
nv, ok := v.(*Attachment)
if !ok {
return nil, fmt.Errorf("unexpected node type %T returned from AttachmentMutation", v)
}
node = nv
}
return node, err
return withHooks(ctx, ac.sqlSave, ac.mutation, ac.hooks)
}
// SaveX calls Save and panics if Save returns an error.
@ -190,6 +162,10 @@ func (ac *AttachmentCreate) defaults() {
v := attachment.DefaultType
ac.mutation.SetType(v)
}
if _, ok := ac.mutation.Primary(); !ok {
v := attachment.DefaultPrimary
ac.mutation.SetPrimary(v)
}
if _, ok := ac.mutation.ID(); !ok {
v := attachment.DefaultID()
ac.mutation.SetID(v)
@ -212,6 +188,9 @@ func (ac *AttachmentCreate) check() error {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
}
}
if _, ok := ac.mutation.Primary(); !ok {
return &ValidationError{Name: "primary", err: errors.New(`ent: missing required field "Attachment.primary"`)}
}
if _, ok := ac.mutation.ItemID(); !ok {
return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
}
@ -222,6 +201,9 @@ func (ac *AttachmentCreate) check() error {
}
func (ac *AttachmentCreate) sqlSave(ctx context.Context) (*Attachment, error) {
if err := ac.check(); err != nil {
return nil, err
}
_node, _spec := ac.createSpec()
if err := sqlgraph.CreateNode(ctx, ac.driver, _spec); err != nil {
if sqlgraph.IsConstraintError(err) {
@ -236,48 +218,36 @@ func (ac *AttachmentCreate) sqlSave(ctx context.Context) (*Attachment, error) {
return nil, err
}
}
ac.mutation.id = &_node.ID
ac.mutation.done = true
return _node, nil
}
func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
var (
_node = &Attachment{config: ac.config}
_spec = &sqlgraph.CreateSpec{
Table: attachment.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(attachment.Table, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
)
if id, ok := ac.mutation.ID(); ok {
_node.ID = id
_spec.ID.Value = &id
}
if value, ok := ac.mutation.CreatedAt(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: attachment.FieldCreatedAt,
})
_spec.SetField(attachment.FieldCreatedAt, field.TypeTime, value)
_node.CreatedAt = value
}
if value, ok := ac.mutation.UpdatedAt(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: attachment.FieldUpdatedAt,
})
_spec.SetField(attachment.FieldUpdatedAt, field.TypeTime, value)
_node.UpdatedAt = value
}
if value, ok := ac.mutation.GetType(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeEnum,
Value: value,
Column: attachment.FieldType,
})
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
_node.Type = value
}
if value, ok := ac.mutation.Primary(); ok {
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
_node.Primary = value
}
if nodes := ac.mutation.ItemIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@ -286,10 +256,7 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
Columns: []string{attachment.ItemColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -306,10 +273,7 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
Columns: []string{attachment.DocumentColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -324,11 +288,15 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
// AttachmentCreateBulk is the builder for creating many Attachment entities in bulk.
type AttachmentCreateBulk struct {
config
err error
builders []*AttachmentCreate
}
// Save creates the Attachment entities in the database.
func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error) {
if acb.err != nil {
return nil, acb.err
}
specs := make([]*sqlgraph.CreateSpec, len(acb.builders))
nodes := make([]*Attachment, len(acb.builders))
mutators := make([]Mutator, len(acb.builders))
@ -345,8 +313,8 @@ func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error
return nil, err
}
builder.mutation = mutation
nodes[i], specs[i] = builder.createSpec()
var err error
nodes[i], specs[i] = builder.createSpec()
if i < len(mutators)-1 {
_, err = mutators[i+1].Mutate(root, acb.builders[i+1].mutation)
} else {

View file

@ -4,7 +4,6 @@ package ent
import (
"context"
"fmt"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
@ -28,34 +27,7 @@ func (ad *AttachmentDelete) Where(ps ...predicate.Attachment) *AttachmentDelete
// Exec executes the deletion query and returns how many vertices were deleted.
func (ad *AttachmentDelete) Exec(ctx context.Context) (int, error) {
var (
err error
affected int
)
if len(ad.hooks) == 0 {
affected, err = ad.sqlExec(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AttachmentMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
ad.mutation = mutation
affected, err = ad.sqlExec(ctx)
mutation.done = true
return affected, err
})
for i := len(ad.hooks) - 1; i >= 0; i-- {
if ad.hooks[i] == nil {
return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
}
mut = ad.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, ad.mutation); err != nil {
return 0, err
}
}
return affected, err
return withHooks(ctx, ad.sqlExec, ad.mutation, ad.hooks)
}
// ExecX is like Exec, but panics if an error occurs.
@ -68,15 +40,7 @@ func (ad *AttachmentDelete) ExecX(ctx context.Context) int {
}
func (ad *AttachmentDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(attachment.Table, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
if ps := ad.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@ -88,6 +52,7 @@ func (ad *AttachmentDelete) sqlExec(ctx context.Context) (int, error) {
if err != nil && sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
ad.mutation.done = true
return affected, err
}
@ -96,6 +61,12 @@ type AttachmentDeleteOne struct {
ad *AttachmentDelete
}
// Where appends a list predicates to the AttachmentDelete builder.
func (ado *AttachmentDeleteOne) Where(ps ...predicate.Attachment) *AttachmentDeleteOne {
ado.ad.mutation.Where(ps...)
return ado
}
// Exec executes the deletion query.
func (ado *AttachmentDeleteOne) Exec(ctx context.Context) error {
n, err := ado.ad.Exec(ctx)
@ -111,5 +82,7 @@ func (ado *AttachmentDeleteOne) Exec(ctx context.Context) error {
// ExecX is like Exec, but panics if an error occurs.
func (ado *AttachmentDeleteOne) ExecX(ctx context.Context) {
ado.ad.ExecX(ctx)
if err := ado.Exec(ctx); err != nil {
panic(err)
}
}


@ -20,11 +20,9 @@ import (
// AttachmentQuery is the builder for querying Attachment entities.
type AttachmentQuery struct {
config
limit *int
offset *int
unique *bool
order []OrderFunc
fields []string
ctx *QueryContext
order []attachment.OrderOption
inters []Interceptor
predicates []predicate.Attachment
withItem *ItemQuery
withDocument *DocumentQuery
@ -40,34 +38,34 @@ func (aq *AttachmentQuery) Where(ps ...predicate.Attachment) *AttachmentQuery {
return aq
}
// Limit adds a limit step to the query.
// Limit the number of records to be returned by this query.
func (aq *AttachmentQuery) Limit(limit int) *AttachmentQuery {
aq.limit = &limit
aq.ctx.Limit = &limit
return aq
}
// Offset adds an offset step to the query.
// Offset to start from.
func (aq *AttachmentQuery) Offset(offset int) *AttachmentQuery {
aq.offset = &offset
aq.ctx.Offset = &offset
return aq
}
// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (aq *AttachmentQuery) Unique(unique bool) *AttachmentQuery {
aq.unique = &unique
aq.ctx.Unique = &unique
return aq
}
// Order adds an order step to the query.
func (aq *AttachmentQuery) Order(o ...OrderFunc) *AttachmentQuery {
// Order specifies how the records should be ordered.
func (aq *AttachmentQuery) Order(o ...attachment.OrderOption) *AttachmentQuery {
aq.order = append(aq.order, o...)
return aq
}
// QueryItem chains the current query on the "item" edge.
func (aq *AttachmentQuery) QueryItem() *ItemQuery {
query := &ItemQuery{config: aq.config}
query := (&ItemClient{config: aq.config}).Query()
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
if err := aq.prepareQuery(ctx); err != nil {
return nil, err
@ -89,7 +87,7 @@ func (aq *AttachmentQuery) QueryItem() *ItemQuery {
// QueryDocument chains the current query on the "document" edge.
func (aq *AttachmentQuery) QueryDocument() *DocumentQuery {
query := &DocumentQuery{config: aq.config}
query := (&DocumentClient{config: aq.config}).Query()
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
if err := aq.prepareQuery(ctx); err != nil {
return nil, err
@ -112,7 +110,7 @@ func (aq *AttachmentQuery) QueryDocument() *DocumentQuery {
// First returns the first Attachment entity from the query.
// Returns a *NotFoundError when no Attachment was found.
func (aq *AttachmentQuery) First(ctx context.Context) (*Attachment, error) {
nodes, err := aq.Limit(1).All(ctx)
nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, "First"))
if err != nil {
return nil, err
}
@ -135,7 +133,7 @@ func (aq *AttachmentQuery) FirstX(ctx context.Context) *Attachment {
// Returns a *NotFoundError when no Attachment ID was found.
func (aq *AttachmentQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
var ids []uuid.UUID
if ids, err = aq.Limit(1).IDs(ctx); err != nil {
if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
@ -158,7 +156,7 @@ func (aq *AttachmentQuery) FirstIDX(ctx context.Context) uuid.UUID {
// Returns a *NotSingularError when more than one Attachment entity is found.
// Returns a *NotFoundError when no Attachment entities are found.
func (aq *AttachmentQuery) Only(ctx context.Context) (*Attachment, error) {
nodes, err := aq.Limit(2).All(ctx)
nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, "Only"))
if err != nil {
return nil, err
}
@ -186,7 +184,7 @@ func (aq *AttachmentQuery) OnlyX(ctx context.Context) *Attachment {
// Returns a *NotFoundError when no entities are found.
func (aq *AttachmentQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
var ids []uuid.UUID
if ids, err = aq.Limit(2).IDs(ctx); err != nil {
if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
@ -211,10 +209,12 @@ func (aq *AttachmentQuery) OnlyIDX(ctx context.Context) uuid.UUID {
// All executes the query and returns a list of Attachments.
func (aq *AttachmentQuery) All(ctx context.Context) ([]*Attachment, error) {
ctx = setContextOp(ctx, aq.ctx, "All")
if err := aq.prepareQuery(ctx); err != nil {
return nil, err
}
return aq.sqlAll(ctx)
qr := querierAll[[]*Attachment, *AttachmentQuery]()
return withInterceptors[[]*Attachment](ctx, aq, qr, aq.inters)
}
// AllX is like All, but panics if an error occurs.
@ -227,9 +227,12 @@ func (aq *AttachmentQuery) AllX(ctx context.Context) []*Attachment {
}
// IDs executes the query and returns a list of Attachment IDs.
func (aq *AttachmentQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
if err := aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
func (aq *AttachmentQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if aq.ctx.Unique == nil && aq.path != nil {
aq.Unique(true)
}
ctx = setContextOp(ctx, aq.ctx, "IDs")
if err = aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@ -246,10 +249,11 @@ func (aq *AttachmentQuery) IDsX(ctx context.Context) []uuid.UUID {
// Count returns the count of the given query.
func (aq *AttachmentQuery) Count(ctx context.Context) (int, error) {
ctx = setContextOp(ctx, aq.ctx, "Count")
if err := aq.prepareQuery(ctx); err != nil {
return 0, err
}
return aq.sqlCount(ctx)
return withInterceptors[int](ctx, aq, querierCount[*AttachmentQuery](), aq.inters)
}
// CountX is like Count, but panics if an error occurs.
@ -263,10 +267,15 @@ func (aq *AttachmentQuery) CountX(ctx context.Context) int {
// Exist returns true if the query has elements in the graph.
func (aq *AttachmentQuery) Exist(ctx context.Context) (bool, error) {
if err := aq.prepareQuery(ctx); err != nil {
return false, err
ctx = setContextOp(ctx, aq.ctx, "Exist")
switch _, err := aq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
case err != nil:
return false, fmt.Errorf("ent: check existence: %w", err)
default:
return true, nil
}
return aq.sqlExist(ctx)
}
// ExistX is like Exist, but panics if an error occurs.
@ -286,23 +295,22 @@ func (aq *AttachmentQuery) Clone() *AttachmentQuery {
}
return &AttachmentQuery{
config: aq.config,
limit: aq.limit,
offset: aq.offset,
order: append([]OrderFunc{}, aq.order...),
ctx: aq.ctx.Clone(),
order: append([]attachment.OrderOption{}, aq.order...),
inters: append([]Interceptor{}, aq.inters...),
predicates: append([]predicate.Attachment{}, aq.predicates...),
withItem: aq.withItem.Clone(),
withDocument: aq.withDocument.Clone(),
// clone intermediate query.
sql: aq.sql.Clone(),
path: aq.path,
unique: aq.unique,
sql: aq.sql.Clone(),
path: aq.path,
}
}
// WithItem tells the query-builder to eager-load the nodes that are connected to
// the "item" edge. The optional arguments are used to configure the query builder of the edge.
func (aq *AttachmentQuery) WithItem(opts ...func(*ItemQuery)) *AttachmentQuery {
query := &ItemQuery{config: aq.config}
query := (&ItemClient{config: aq.config}).Query()
for _, opt := range opts {
opt(query)
}
@ -313,7 +321,7 @@ func (aq *AttachmentQuery) WithItem(opts ...func(*ItemQuery)) *AttachmentQuery {
// WithDocument tells the query-builder to eager-load the nodes that are connected to
// the "document" edge. The optional arguments are used to configure the query builder of the edge.
func (aq *AttachmentQuery) WithDocument(opts ...func(*DocumentQuery)) *AttachmentQuery {
query := &DocumentQuery{config: aq.config}
query := (&DocumentClient{config: aq.config}).Query()
for _, opt := range opts {
opt(query)
}
@ -336,16 +344,11 @@ func (aq *AttachmentQuery) WithDocument(opts ...func(*DocumentQuery)) *Attachmen
// Aggregate(ent.Count()).
// Scan(ctx, &v)
func (aq *AttachmentQuery) GroupBy(field string, fields ...string) *AttachmentGroupBy {
grbuild := &AttachmentGroupBy{config: aq.config}
grbuild.fields = append([]string{field}, fields...)
grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) {
if err := aq.prepareQuery(ctx); err != nil {
return nil, err
}
return aq.sqlQuery(ctx), nil
}
aq.ctx.Fields = append([]string{field}, fields...)
grbuild := &AttachmentGroupBy{build: aq}
grbuild.flds = &aq.ctx.Fields
grbuild.label = attachment.Label
grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan
grbuild.scan = grbuild.Scan
return grbuild
}
@ -362,15 +365,30 @@ func (aq *AttachmentQuery) GroupBy(field string, fields ...string) *AttachmentGr
// Select(attachment.FieldCreatedAt).
// Scan(ctx, &v)
func (aq *AttachmentQuery) Select(fields ...string) *AttachmentSelect {
aq.fields = append(aq.fields, fields...)
selbuild := &AttachmentSelect{AttachmentQuery: aq}
selbuild.label = attachment.Label
selbuild.flds, selbuild.scan = &aq.fields, selbuild.Scan
return selbuild
aq.ctx.Fields = append(aq.ctx.Fields, fields...)
sbuild := &AttachmentSelect{AttachmentQuery: aq}
sbuild.label = attachment.Label
sbuild.flds, sbuild.scan = &aq.ctx.Fields, sbuild.Scan
return sbuild
}
// Aggregate returns an AttachmentSelect configured with the given aggregations.
func (aq *AttachmentQuery) Aggregate(fns ...AggregateFunc) *AttachmentSelect {
return aq.Select().Aggregate(fns...)
}
func (aq *AttachmentQuery) prepareQuery(ctx context.Context) error {
for _, f := range aq.fields {
for _, inter := range aq.inters {
if inter == nil {
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
}
if trv, ok := inter.(Traverser); ok {
if err := trv.Traverse(ctx, aq); err != nil {
return err
}
}
}
for _, f := range aq.ctx.Fields {
if !attachment.ValidColumn(f) {
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
@ -447,6 +465,9 @@ func (aq *AttachmentQuery) loadItem(ctx context.Context, query *ItemQuery, nodes
}
nodeids[fk] = append(nodeids[fk], nodes[i])
}
if len(ids) == 0 {
return nil
}
query.Where(item.IDIn(ids...))
neighbors, err := query.All(ctx)
if err != nil {
@ -476,6 +497,9 @@ func (aq *AttachmentQuery) loadDocument(ctx context.Context, query *DocumentQuer
}
nodeids[fk] = append(nodeids[fk], nodes[i])
}
if len(ids) == 0 {
return nil
}
query.Where(document.IDIn(ids...))
neighbors, err := query.All(ctx)
if err != nil {
@ -495,41 +519,22 @@ func (aq *AttachmentQuery) loadDocument(ctx context.Context, query *DocumentQuer
func (aq *AttachmentQuery) sqlCount(ctx context.Context) (int, error) {
_spec := aq.querySpec()
_spec.Node.Columns = aq.fields
if len(aq.fields) > 0 {
_spec.Unique = aq.unique != nil && *aq.unique
_spec.Node.Columns = aq.ctx.Fields
if len(aq.ctx.Fields) > 0 {
_spec.Unique = aq.ctx.Unique != nil && *aq.ctx.Unique
}
return sqlgraph.CountNodes(ctx, aq.driver, _spec)
}
func (aq *AttachmentQuery) sqlExist(ctx context.Context) (bool, error) {
switch _, err := aq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
case err != nil:
return false, fmt.Errorf("ent: check existence: %w", err)
default:
return true, nil
}
}
func (aq *AttachmentQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
Columns: attachment.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
From: aq.sql,
Unique: true,
}
if unique := aq.unique; unique != nil {
_spec := sqlgraph.NewQuerySpec(attachment.Table, attachment.Columns, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
_spec.From = aq.sql
if unique := aq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if aq.path != nil {
_spec.Unique = true
}
if fields := aq.fields; len(fields) > 0 {
if fields := aq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, attachment.FieldID)
for i := range fields {
@ -545,10 +550,10 @@ func (aq *AttachmentQuery) querySpec() *sqlgraph.QuerySpec {
}
}
}
if limit := aq.limit; limit != nil {
if limit := aq.ctx.Limit; limit != nil {
_spec.Limit = *limit
}
if offset := aq.offset; offset != nil {
if offset := aq.ctx.Offset; offset != nil {
_spec.Offset = *offset
}
if ps := aq.order; len(ps) > 0 {
@ -564,7 +569,7 @@ func (aq *AttachmentQuery) querySpec() *sqlgraph.QuerySpec {
func (aq *AttachmentQuery) sqlQuery(ctx context.Context) *sql.Selector {
builder := sql.Dialect(aq.driver.Dialect())
t1 := builder.Table(attachment.Table)
columns := aq.fields
columns := aq.ctx.Fields
if len(columns) == 0 {
columns = attachment.Columns
}
@ -573,7 +578,7 @@ func (aq *AttachmentQuery) sqlQuery(ctx context.Context) *sql.Selector {
selector = aq.sql
selector.Select(selector.Columns(columns...)...)
}
if aq.unique != nil && *aq.unique {
if aq.ctx.Unique != nil && *aq.ctx.Unique {
selector.Distinct()
}
for _, p := range aq.predicates {
@ -582,12 +587,12 @@ func (aq *AttachmentQuery) sqlQuery(ctx context.Context) *sql.Selector {
for _, p := range aq.order {
p(selector)
}
if offset := aq.offset; offset != nil {
if offset := aq.ctx.Offset; offset != nil {
// limit is mandatory for offset clause. We start
// with default value, and override it below if needed.
selector.Offset(*offset).Limit(math.MaxInt32)
}
if limit := aq.limit; limit != nil {
if limit := aq.ctx.Limit; limit != nil {
selector.Limit(*limit)
}
return selector
@ -595,13 +600,8 @@ func (aq *AttachmentQuery) sqlQuery(ctx context.Context) *sql.Selector {
// AttachmentGroupBy is the group-by builder for Attachment entities.
type AttachmentGroupBy struct {
config
selector
fields []string
fns []AggregateFunc
// intermediate query (i.e. traversal path).
sql *sql.Selector
path func(context.Context) (*sql.Selector, error)
build *AttachmentQuery
}
// Aggregate adds the given aggregation functions to the group-by query.
@ -610,74 +610,77 @@ func (agb *AttachmentGroupBy) Aggregate(fns ...AggregateFunc) *AttachmentGroupBy
return agb
}
// Scan applies the group-by query and scans the result into the given value.
// Scan applies the selector query and scans the result into the given value.
func (agb *AttachmentGroupBy) Scan(ctx context.Context, v any) error {
query, err := agb.path(ctx)
if err != nil {
ctx = setContextOp(ctx, agb.build.ctx, "GroupBy")
if err := agb.build.prepareQuery(ctx); err != nil {
return err
}
agb.sql = query
return agb.sqlScan(ctx, v)
return scanWithInterceptors[*AttachmentQuery, *AttachmentGroupBy](ctx, agb.build, agb, agb.build.inters, v)
}
func (agb *AttachmentGroupBy) sqlScan(ctx context.Context, v any) error {
for _, f := range agb.fields {
if !attachment.ValidColumn(f) {
return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)}
}
}
selector := agb.sqlQuery()
if err := selector.Err(); err != nil {
return err
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := agb.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
func (agb *AttachmentGroupBy) sqlQuery() *sql.Selector {
selector := agb.sql.Select()
func (agb *AttachmentGroupBy) sqlScan(ctx context.Context, root *AttachmentQuery, v any) error {
selector := root.sqlQuery(ctx).Select()
aggregation := make([]string, 0, len(agb.fns))
for _, fn := range agb.fns {
aggregation = append(aggregation, fn(selector))
}
// If no columns were selected in a custom aggregation function, the default
// selection is the fields used for "group-by", and the aggregation functions.
if len(selector.SelectedColumns()) == 0 {
columns := make([]string, 0, len(agb.fields)+len(agb.fns))
for _, f := range agb.fields {
columns := make([]string, 0, len(*agb.flds)+len(agb.fns))
for _, f := range *agb.flds {
columns = append(columns, selector.C(f))
}
columns = append(columns, aggregation...)
selector.Select(columns...)
}
return selector.GroupBy(selector.Columns(agb.fields...)...)
selector.GroupBy(selector.Columns(*agb.flds...)...)
if err := selector.Err(); err != nil {
return err
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := agb.build.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
// AttachmentSelect is the builder for selecting fields of Attachment entities.
type AttachmentSelect struct {
*AttachmentQuery
selector
// intermediate query (i.e. traversal path).
sql *sql.Selector
}
// Aggregate adds the given aggregation functions to the selector query.
func (as *AttachmentSelect) Aggregate(fns ...AggregateFunc) *AttachmentSelect {
as.fns = append(as.fns, fns...)
return as
}
// Scan applies the selector query and scans the result into the given value.
func (as *AttachmentSelect) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, as.ctx, "Select")
if err := as.prepareQuery(ctx); err != nil {
return err
}
as.sql = as.AttachmentQuery.sqlQuery(ctx)
return as.sqlScan(ctx, v)
return scanWithInterceptors[*AttachmentQuery, *AttachmentSelect](ctx, as.AttachmentQuery, as, as.inters, v)
}
func (as *AttachmentSelect) sqlScan(ctx context.Context, v any) error {
func (as *AttachmentSelect) sqlScan(ctx context.Context, root *AttachmentQuery, v any) error {
selector := root.sqlQuery(ctx)
aggregation := make([]string, 0, len(as.fns))
for _, fn := range as.fns {
aggregation = append(aggregation, fn(selector))
}
switch n := len(*as.selector.flds); {
case n == 0 && len(aggregation) > 0:
selector.Select(aggregation...)
case n != 0 && len(aggregation) > 0:
selector.AppendSelect(aggregation...)
}
rows := &sql.Rows{}
query, args := as.sql.Query()
query, args := selector.Query()
if err := as.driver.Query(ctx, query, args, rows); err != nil {
return err
}
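
The hunks above move AttachmentQuery onto the newer ent runtime: limit/offset/unique/fields now live on a QueryContext, ordering uses attachment.OrderOption, queries run through interceptors, and an Aggregate helper is added. As a minimal usage sketch of the GroupBy/Aggregate pattern documented in the builder comments, assuming an *ent.Client value named client and the usual generated import paths (hypothetical example, not part of the diff):

// Assumed imports (hypothetical):
//   "context"
//   "github.com/hay-kot/homebox/backend/internal/data/ent"
//   "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
func countAttachmentsByType(ctx context.Context, client *ent.Client) error {
	// Group attachments by the "type" column and count each group.
	var v []struct {
		Type  string `json:"type,omitempty"`
		Count int    `json:"count,omitempty"`
	}
	return client.Attachment.Query().
		GroupBy(attachment.FieldType).
		Aggregate(ent.Count()).
		Scan(ctx, &v)
}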


@ -51,6 +51,20 @@ func (au *AttachmentUpdate) SetNillableType(a *attachment.Type) *AttachmentUpdat
return au
}
// SetPrimary sets the "primary" field.
func (au *AttachmentUpdate) SetPrimary(b bool) *AttachmentUpdate {
au.mutation.SetPrimary(b)
return au
}
// SetNillablePrimary sets the "primary" field if the given value is not nil.
func (au *AttachmentUpdate) SetNillablePrimary(b *bool) *AttachmentUpdate {
if b != nil {
au.SetPrimary(*b)
}
return au
}
// SetItemID sets the "item" edge to the Item entity by ID.
func (au *AttachmentUpdate) SetItemID(id uuid.UUID) *AttachmentUpdate {
au.mutation.SetItemID(id)
@ -92,41 +106,8 @@ func (au *AttachmentUpdate) ClearDocument() *AttachmentUpdate {
// Save executes the query and returns the number of nodes affected by the update operation.
func (au *AttachmentUpdate) Save(ctx context.Context) (int, error) {
var (
err error
affected int
)
au.defaults()
if len(au.hooks) == 0 {
if err = au.check(); err != nil {
return 0, err
}
affected, err = au.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AttachmentMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err = au.check(); err != nil {
return 0, err
}
au.mutation = mutation
affected, err = au.sqlSave(ctx)
mutation.done = true
return affected, err
})
for i := len(au.hooks) - 1; i >= 0; i-- {
if au.hooks[i] == nil {
return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
}
mut = au.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, au.mutation); err != nil {
return 0, err
}
}
return affected, err
return withHooks(ctx, au.sqlSave, au.mutation, au.hooks)
}
// SaveX is like Save, but panics if an error occurs.
@ -176,16 +157,10 @@ func (au *AttachmentUpdate) check() error {
}
func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
Columns: attachment.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
if err := au.check(); err != nil {
return n, err
}
_spec := sqlgraph.NewUpdateSpec(attachment.Table, attachment.Columns, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
if ps := au.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@ -194,18 +169,13 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
}
}
if value, ok := au.mutation.UpdatedAt(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: attachment.FieldUpdatedAt,
})
_spec.SetField(attachment.FieldUpdatedAt, field.TypeTime, value)
}
if value, ok := au.mutation.GetType(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeEnum,
Value: value,
Column: attachment.FieldType,
})
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
}
if value, ok := au.mutation.Primary(); ok {
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
}
if au.mutation.ItemCleared() {
edge := &sqlgraph.EdgeSpec{
@ -215,10 +185,7 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
Columns: []string{attachment.ItemColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID),
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
@ -231,10 +198,7 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
Columns: []string{attachment.ItemColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -250,10 +214,7 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
Columns: []string{attachment.DocumentColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID),
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
@ -266,10 +227,7 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
Columns: []string{attachment.DocumentColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -285,6 +243,7 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
}
return 0, err
}
au.mutation.done = true
return n, nil
}
@ -316,6 +275,20 @@ func (auo *AttachmentUpdateOne) SetNillableType(a *attachment.Type) *AttachmentU
return auo
}
// SetPrimary sets the "primary" field.
func (auo *AttachmentUpdateOne) SetPrimary(b bool) *AttachmentUpdateOne {
auo.mutation.SetPrimary(b)
return auo
}
// SetNillablePrimary sets the "primary" field if the given value is not nil.
func (auo *AttachmentUpdateOne) SetNillablePrimary(b *bool) *AttachmentUpdateOne {
if b != nil {
auo.SetPrimary(*b)
}
return auo
}
// SetItemID sets the "item" edge to the Item entity by ID.
func (auo *AttachmentUpdateOne) SetItemID(id uuid.UUID) *AttachmentUpdateOne {
auo.mutation.SetItemID(id)
@ -355,6 +328,12 @@ func (auo *AttachmentUpdateOne) ClearDocument() *AttachmentUpdateOne {
return auo
}
// Where appends a list of predicates to the AttachmentUpdate builder.
func (auo *AttachmentUpdateOne) Where(ps ...predicate.Attachment) *AttachmentUpdateOne {
auo.mutation.Where(ps...)
return auo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (auo *AttachmentUpdateOne) Select(field string, fields ...string) *AttachmentUpdateOne {
@ -364,47 +343,8 @@ func (auo *AttachmentUpdateOne) Select(field string, fields ...string) *Attachme
// Save executes the query and returns the updated Attachment entity.
func (auo *AttachmentUpdateOne) Save(ctx context.Context) (*Attachment, error) {
var (
err error
node *Attachment
)
auo.defaults()
if len(auo.hooks) == 0 {
if err = auo.check(); err != nil {
return nil, err
}
node, err = auo.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AttachmentMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err = auo.check(); err != nil {
return nil, err
}
auo.mutation = mutation
node, err = auo.sqlSave(ctx)
mutation.done = true
return node, err
})
for i := len(auo.hooks) - 1; i >= 0; i-- {
if auo.hooks[i] == nil {
return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
}
mut = auo.hooks[i](mut)
}
v, err := mut.Mutate(ctx, auo.mutation)
if err != nil {
return nil, err
}
nv, ok := v.(*Attachment)
if !ok {
return nil, fmt.Errorf("unexpected node type %T returned from AttachmentMutation", v)
}
node = nv
}
return node, err
return withHooks(ctx, auo.sqlSave, auo.mutation, auo.hooks)
}
// SaveX is like Save, but panics if an error occurs.
@ -454,16 +394,10 @@ func (auo *AttachmentUpdateOne) check() error {
}
func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
Columns: attachment.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
if err := auo.check(); err != nil {
return _node, err
}
_spec := sqlgraph.NewUpdateSpec(attachment.Table, attachment.Columns, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
id, ok := auo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Attachment.id" for update`)}
@ -489,18 +423,13 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
}
}
if value, ok := auo.mutation.UpdatedAt(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: attachment.FieldUpdatedAt,
})
_spec.SetField(attachment.FieldUpdatedAt, field.TypeTime, value)
}
if value, ok := auo.mutation.GetType(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeEnum,
Value: value,
Column: attachment.FieldType,
})
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
}
if value, ok := auo.mutation.Primary(); ok {
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
}
if auo.mutation.ItemCleared() {
edge := &sqlgraph.EdgeSpec{
@ -510,10 +439,7 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
Columns: []string{attachment.ItemColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID),
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
@ -526,10 +452,7 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
Columns: []string{attachment.ItemColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -545,10 +468,7 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
Columns: []string{attachment.DocumentColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID),
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
@ -561,10 +481,7 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
Columns: []string{attachment.DocumentColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -583,5 +500,6 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
}
return nil, err
}
auo.mutation.done = true
return _node, nil
}
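
These update-builder hunks add the new "primary" boolean field (SetPrimary/SetNillablePrimary) and a Where method on AttachmentUpdateOne, alongside the move to withHooks and sqlgraph.NewUpdateSpec. A small sketch of setting the flag on one attachment; UpdateOneID is assumed to exist on the generated client, as in standard ent clients (hypothetical, not part of the diff):

func markPrimary(ctx context.Context, client *ent.Client, id uuid.UUID) error {
	// Flip the newly added "primary" field on a single attachment by ID.
	_, err := client.Attachment.
		UpdateOneID(id). // assumed generated helper, not shown in this diff
		SetPrimary(true).
		Save(ctx)
	return err
}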


@ -0,0 +1,145 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"fmt"
"strings"
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)
// AuthRoles is the model entity for the AuthRoles schema.
type AuthRoles struct {
config `json:"-"`
// ID of the ent.
ID int `json:"id,omitempty"`
// Role holds the value of the "role" field.
Role authroles.Role `json:"role,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the AuthRolesQuery when eager-loading is set.
Edges AuthRolesEdges `json:"edges"`
auth_tokens_roles *uuid.UUID
selectValues sql.SelectValues
}
// AuthRolesEdges holds the relations/edges for other nodes in the graph.
type AuthRolesEdges struct {
// Token holds the value of the token edge.
Token *AuthTokens `json:"token,omitempty"`
// loadedTypes holds the information for reporting if a
// type was loaded (or requested) in eager-loading or not.
loadedTypes [1]bool
}
// TokenOrErr returns the Token value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthRolesEdges) TokenOrErr() (*AuthTokens, error) {
if e.loadedTypes[0] {
if e.Token == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: authtokens.Label}
}
return e.Token, nil
}
return nil, &NotLoadedError{edge: "token"}
}
// scanValues returns the types for scanning values from sql.Rows.
func (*AuthRoles) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case authroles.FieldID:
values[i] = new(sql.NullInt64)
case authroles.FieldRole:
values[i] = new(sql.NullString)
case authroles.ForeignKeys[0]: // auth_tokens_roles
values[i] = &sql.NullScanner{S: new(uuid.UUID)}
default:
values[i] = new(sql.UnknownType)
}
}
return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the AuthRoles fields.
func (ar *AuthRoles) assignValues(columns []string, values []any) error {
if m, n := len(values), len(columns); m < n {
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
}
for i := range columns {
switch columns[i] {
case authroles.FieldID:
value, ok := values[i].(*sql.NullInt64)
if !ok {
return fmt.Errorf("unexpected type %T for field id", value)
}
ar.ID = int(value.Int64)
case authroles.FieldRole:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field role", values[i])
} else if value.Valid {
ar.Role = authroles.Role(value.String)
}
case authroles.ForeignKeys[0]:
if value, ok := values[i].(*sql.NullScanner); !ok {
return fmt.Errorf("unexpected type %T for field auth_tokens_roles", values[i])
} else if value.Valid {
ar.auth_tokens_roles = new(uuid.UUID)
*ar.auth_tokens_roles = *value.S.(*uuid.UUID)
}
default:
ar.selectValues.Set(columns[i], values[i])
}
}
return nil
}
// Value returns the ent.Value that was dynamically selected and assigned to the AuthRoles.
// This includes values selected through modifiers, order, etc.
func (ar *AuthRoles) Value(name string) (ent.Value, error) {
return ar.selectValues.Get(name)
}
// QueryToken queries the "token" edge of the AuthRoles entity.
func (ar *AuthRoles) QueryToken() *AuthTokensQuery {
return NewAuthRolesClient(ar.config).QueryToken(ar)
}
// Update returns a builder for updating this AuthRoles.
// Note that you need to call AuthRoles.Unwrap() before calling this method if this AuthRoles
// was returned from a transaction, and the transaction was committed or rolled back.
func (ar *AuthRoles) Update() *AuthRolesUpdateOne {
return NewAuthRolesClient(ar.config).UpdateOne(ar)
}
// Unwrap unwraps the AuthRoles entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (ar *AuthRoles) Unwrap() *AuthRoles {
_tx, ok := ar.config.driver.(*txDriver)
if !ok {
panic("ent: AuthRoles is not a transactional entity")
}
ar.config.driver = _tx.drv
return ar
}
// String implements the fmt.Stringer.
func (ar *AuthRoles) String() string {
var builder strings.Builder
builder.WriteString("AuthRoles(")
builder.WriteString(fmt.Sprintf("id=%v, ", ar.ID))
builder.WriteString("role=")
builder.WriteString(fmt.Sprintf("%v", ar.Role))
builder.WriteByte(')')
return builder.String()
}
// AuthRolesSlice is a parsable slice of AuthRoles.
type AuthRolesSlice []*AuthRoles
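
The AuthRoles model exposes its single edge through Edges.Token and the TokenOrErr accessor; the edge is only populated when it is eager-loaded. A brief sketch combining it with the WithToken option defined in the query builder further below; client, ctx, and the fmt import are assumed (hypothetical, not part of the diff):

func printRoleTokens(ctx context.Context, client *ent.Client) error {
	roles, err := client.AuthRoles.Query().
		WithToken(). // eager-load the "token" edge so TokenOrErr does not return NotLoadedError
		All(ctx)
	if err != nil {
		return err
	}
	for _, r := range roles {
		tok, err := r.Edges.TokenOrErr()
		if err != nil {
			return err
		}
		fmt.Printf("role %q belongs to token %s\n", r.Role, tok.ID)
	}
	return nil
}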


@ -0,0 +1,111 @@
// Code generated by ent, DO NOT EDIT.
package authroles
import (
"fmt"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
)
const (
// Label holds the string label denoting the authroles type in the database.
Label = "auth_roles"
// FieldID holds the string denoting the id field in the database.
FieldID = "id"
// FieldRole holds the string denoting the role field in the database.
FieldRole = "role"
// EdgeToken holds the string denoting the token edge name in mutations.
EdgeToken = "token"
// Table holds the table name of the authroles in the database.
Table = "auth_roles"
// TokenTable is the table that holds the token relation/edge.
TokenTable = "auth_roles"
// TokenInverseTable is the table name for the AuthTokens entity.
// It exists in this package in order to avoid circular dependency with the "authtokens" package.
TokenInverseTable = "auth_tokens"
// TokenColumn is the table column denoting the token relation/edge.
TokenColumn = "auth_tokens_roles"
)
// Columns holds all SQL columns for authroles fields.
var Columns = []string{
FieldID,
FieldRole,
}
// ForeignKeys holds the SQL foreign-keys that are owned by the "auth_roles"
// table and are not defined as standalone fields in the schema.
var ForeignKeys = []string{
"auth_tokens_roles",
}
// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
for i := range Columns {
if column == Columns[i] {
return true
}
}
for i := range ForeignKeys {
if column == ForeignKeys[i] {
return true
}
}
return false
}
// Role defines the type for the "role" enum field.
type Role string
// RoleUser is the default value of the Role enum.
const DefaultRole = RoleUser
// Role values.
const (
RoleAdmin Role = "admin"
RoleUser Role = "user"
RoleAttachments Role = "attachments"
)
func (r Role) String() string {
return string(r)
}
// RoleValidator is a validator for the "role" field enum values. It is called by the builders before save.
func RoleValidator(r Role) error {
switch r {
case RoleAdmin, RoleUser, RoleAttachments:
return nil
default:
return fmt.Errorf("authroles: invalid enum value for role field: %q", r)
}
}
// OrderOption defines the ordering options for the AuthRoles queries.
type OrderOption func(*sql.Selector)
// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldID, opts...).ToFunc()
}
// ByRole orders the results by the role field.
func ByRole(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldRole, opts...).ToFunc()
}
// ByTokenField orders the results by token field.
func ByTokenField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {
sqlgraph.OrderByNeighborTerms(s, newTokenStep(), sql.OrderByField(field, opts...))
}
}
func newTokenStep() *sqlgraph.Step {
return sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(TokenInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.O2O, true, TokenTable, TokenColumn),
)
}
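
Because the role enum, its validator, and the ordering helpers above are plain package-level declarations, they can be used directly outside the generated builders. A short sketch, where RoleEQ comes from the predicate helpers shown in the next file and the client value is assumed (hypothetical, not part of the diff):

func listRolesOrdered(ctx context.Context, client *ent.Client, raw string) ([]*ent.AuthRoles, error) {
	// Reject anything that is not admin, user, or attachments before querying.
	if err := authroles.RoleValidator(authroles.Role(raw)); err != nil {
		return nil, err
	}
	// Filter on the requested role and order results with the generated ByRole option.
	return client.AuthRoles.Query().
		Where(authroles.RoleEQ(authroles.Role(raw))).
		Order(authroles.ByRole()).
		All(ctx)
}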


@ -0,0 +1,112 @@
// Code generated by ent, DO NOT EDIT.
package authroles
import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// ID filters vertices based on their ID field.
func ID(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldEQ(FieldID, id))
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldEQ(FieldID, id))
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldNEQ(FieldID, id))
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldIn(FieldID, ids...))
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldNotIn(FieldID, ids...))
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldGT(FieldID, id))
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldGTE(FieldID, id))
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldLT(FieldID, id))
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldLTE(FieldID, id))
}
// RoleEQ applies the EQ predicate on the "role" field.
func RoleEQ(v Role) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldEQ(FieldRole, v))
}
// RoleNEQ applies the NEQ predicate on the "role" field.
func RoleNEQ(v Role) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldNEQ(FieldRole, v))
}
// RoleIn applies the In predicate on the "role" field.
func RoleIn(vs ...Role) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldIn(FieldRole, vs...))
}
// RoleNotIn applies the NotIn predicate on the "role" field.
func RoleNotIn(vs ...Role) predicate.AuthRoles {
return predicate.AuthRoles(sql.FieldNotIn(FieldRole, vs...))
}
// HasToken applies the HasEdge predicate on the "token" edge.
func HasToken() predicate.AuthRoles {
return predicate.AuthRoles(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.Edge(sqlgraph.O2O, true, TokenTable, TokenColumn),
)
sqlgraph.HasNeighbors(s, step)
})
}
// HasTokenWith applies the HasEdge predicate on the "token" edge with given conditions (other predicates).
func HasTokenWith(preds ...predicate.AuthTokens) predicate.AuthRoles {
return predicate.AuthRoles(func(s *sql.Selector) {
step := newTokenStep()
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
}
})
})
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthRoles) predicate.AuthRoles {
return predicate.AuthRoles(sql.AndPredicates(predicates...))
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthRoles) predicate.AuthRoles {
return predicate.AuthRoles(sql.OrPredicates(predicates...))
}
// Not applies the not operator on the given predicate.
func Not(p predicate.AuthRoles) predicate.AuthRoles {
return predicate.AuthRoles(sql.NotPredicates(p))
}
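
The generated predicates compose with And, Or, and Not, and HasToken/HasTokenWith walk the token edge. A minimal filtering sketch, assuming the same client value as above (hypothetical, not part of the diff):

func adminRolesWithToken(ctx context.Context, client *ent.Client) ([]*ent.AuthRoles, error) {
	// Keep only "admin" roles that are linked to an auth token.
	return client.AuthRoles.Query().
		Where(authroles.And(
			authroles.RoleEQ(authroles.RoleAdmin),
			authroles.HasToken(),
		)).
		All(ctx)
}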


@ -0,0 +1,244 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)
// AuthRolesCreate is the builder for creating an AuthRoles entity.
type AuthRolesCreate struct {
config
mutation *AuthRolesMutation
hooks []Hook
}
// SetRole sets the "role" field.
func (arc *AuthRolesCreate) SetRole(a authroles.Role) *AuthRolesCreate {
arc.mutation.SetRole(a)
return arc
}
// SetNillableRole sets the "role" field if the given value is not nil.
func (arc *AuthRolesCreate) SetNillableRole(a *authroles.Role) *AuthRolesCreate {
if a != nil {
arc.SetRole(*a)
}
return arc
}
// SetTokenID sets the "token" edge to the AuthTokens entity by ID.
func (arc *AuthRolesCreate) SetTokenID(id uuid.UUID) *AuthRolesCreate {
arc.mutation.SetTokenID(id)
return arc
}
// SetNillableTokenID sets the "token" edge to the AuthTokens entity by ID if the given value is not nil.
func (arc *AuthRolesCreate) SetNillableTokenID(id *uuid.UUID) *AuthRolesCreate {
if id != nil {
arc = arc.SetTokenID(*id)
}
return arc
}
// SetToken sets the "token" edge to the AuthTokens entity.
func (arc *AuthRolesCreate) SetToken(a *AuthTokens) *AuthRolesCreate {
return arc.SetTokenID(a.ID)
}
// Mutation returns the AuthRolesMutation object of the builder.
func (arc *AuthRolesCreate) Mutation() *AuthRolesMutation {
return arc.mutation
}
// Save creates the AuthRoles in the database.
func (arc *AuthRolesCreate) Save(ctx context.Context) (*AuthRoles, error) {
arc.defaults()
return withHooks(ctx, arc.sqlSave, arc.mutation, arc.hooks)
}
// SaveX calls Save and panics if Save returns an error.
func (arc *AuthRolesCreate) SaveX(ctx context.Context) *AuthRoles {
v, err := arc.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (arc *AuthRolesCreate) Exec(ctx context.Context) error {
_, err := arc.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (arc *AuthRolesCreate) ExecX(ctx context.Context) {
if err := arc.Exec(ctx); err != nil {
panic(err)
}
}
// defaults sets the default values of the builder before save.
func (arc *AuthRolesCreate) defaults() {
if _, ok := arc.mutation.Role(); !ok {
v := authroles.DefaultRole
arc.mutation.SetRole(v)
}
}
// check runs all checks and user-defined validators on the builder.
func (arc *AuthRolesCreate) check() error {
if _, ok := arc.mutation.Role(); !ok {
return &ValidationError{Name: "role", err: errors.New(`ent: missing required field "AuthRoles.role"`)}
}
if v, ok := arc.mutation.Role(); ok {
if err := authroles.RoleValidator(v); err != nil {
return &ValidationError{Name: "role", err: fmt.Errorf(`ent: validator failed for field "AuthRoles.role": %w`, err)}
}
}
return nil
}
func (arc *AuthRolesCreate) sqlSave(ctx context.Context) (*AuthRoles, error) {
if err := arc.check(); err != nil {
return nil, err
}
_node, _spec := arc.createSpec()
if err := sqlgraph.CreateNode(ctx, arc.driver, _spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
id := _spec.ID.Value.(int64)
_node.ID = int(id)
arc.mutation.id = &_node.ID
arc.mutation.done = true
return _node, nil
}
func (arc *AuthRolesCreate) createSpec() (*AuthRoles, *sqlgraph.CreateSpec) {
var (
_node = &AuthRoles{config: arc.config}
_spec = sqlgraph.NewCreateSpec(authroles.Table, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
)
if value, ok := arc.mutation.Role(); ok {
_spec.SetField(authroles.FieldRole, field.TypeEnum, value)
_node.Role = value
}
if nodes := arc.mutation.TokenIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: authroles.TokenTable,
Columns: []string{authroles.TokenColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_node.auth_tokens_roles = &nodes[0]
_spec.Edges = append(_spec.Edges, edge)
}
return _node, _spec
}
// AuthRolesCreateBulk is the builder for creating many AuthRoles entities in bulk.
type AuthRolesCreateBulk struct {
config
err error
builders []*AuthRolesCreate
}
// Save creates the AuthRoles entities in the database.
func (arcb *AuthRolesCreateBulk) Save(ctx context.Context) ([]*AuthRoles, error) {
if arcb.err != nil {
return nil, arcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(arcb.builders))
nodes := make([]*AuthRoles, len(arcb.builders))
mutators := make([]Mutator, len(arcb.builders))
for i := range arcb.builders {
func(i int, root context.Context) {
builder := arcb.builders[i]
builder.defaults()
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AuthRolesMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err := builder.check(); err != nil {
return nil, err
}
builder.mutation = mutation
var err error
nodes[i], specs[i] = builder.createSpec()
if i < len(mutators)-1 {
_, err = mutators[i+1].Mutate(root, arcb.builders[i+1].mutation)
} else {
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
// Invoke the actual operation on the latest mutation in the chain.
if err = sqlgraph.BatchCreate(ctx, arcb.driver, spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
}
}
if err != nil {
return nil, err
}
mutation.id = &nodes[i].ID
if specs[i].ID.Value != nil {
id := specs[i].ID.Value.(int64)
nodes[i].ID = int(id)
}
mutation.done = true
return nodes[i], nil
})
for i := len(builder.hooks) - 1; i >= 0; i-- {
mut = builder.hooks[i](mut)
}
mutators[i] = mut
}(i, ctx)
}
if len(mutators) > 0 {
if _, err := mutators[0].Mutate(ctx, arcb.builders[0].mutation); err != nil {
return nil, err
}
}
return nodes, nil
}
// SaveX is like Save, but panics if an error occurs.
func (arcb *AuthRolesCreateBulk) SaveX(ctx context.Context) []*AuthRoles {
v, err := arcb.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (arcb *AuthRolesCreateBulk) Exec(ctx context.Context) error {
_, err := arcb.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (arcb *AuthRolesCreateBulk) ExecX(ctx context.Context) {
if err := arcb.Exec(ctx); err != nil {
panic(err)
}
}
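
A small sketch of the create builder above; the client.AuthRoles.Create() entry point is assumed from the doc comments elsewhere in the generated code (hypothetical, not part of the diff):

func attachRole(ctx context.Context, client *ent.Client, tok *ent.AuthTokens) (*ent.AuthRoles, error) {
	// defaults() applies DefaultRole (RoleUser) when SetRole is omitted;
	// here the role is set explicitly and linked to an existing token.
	return client.AuthRoles.
		Create(). // assumed client entry point
		SetRole(authroles.RoleAttachments).
		SetToken(tok).
		Save(ctx)
}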


@ -0,0 +1,88 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthRolesDelete is the builder for deleting an AuthRoles entity.
type AuthRolesDelete struct {
config
hooks []Hook
mutation *AuthRolesMutation
}
// Where appends a list of predicates to the AuthRolesDelete builder.
func (ard *AuthRolesDelete) Where(ps ...predicate.AuthRoles) *AuthRolesDelete {
ard.mutation.Where(ps...)
return ard
}
// Exec executes the deletion query and returns how many vertices were deleted.
func (ard *AuthRolesDelete) Exec(ctx context.Context) (int, error) {
return withHooks(ctx, ard.sqlExec, ard.mutation, ard.hooks)
}
// ExecX is like Exec, but panics if an error occurs.
func (ard *AuthRolesDelete) ExecX(ctx context.Context) int {
n, err := ard.Exec(ctx)
if err != nil {
panic(err)
}
return n
}
func (ard *AuthRolesDelete) sqlExec(ctx context.Context) (int, error) {
_spec := sqlgraph.NewDeleteSpec(authroles.Table, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
if ps := ard.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
affected, err := sqlgraph.DeleteNodes(ctx, ard.driver, _spec)
if err != nil && sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
ard.mutation.done = true
return affected, err
}
// AuthRolesDeleteOne is the builder for deleting a single AuthRoles entity.
type AuthRolesDeleteOne struct {
ard *AuthRolesDelete
}
// Where appends a list of predicates to the AuthRolesDelete builder.
func (ardo *AuthRolesDeleteOne) Where(ps ...predicate.AuthRoles) *AuthRolesDeleteOne {
ardo.ard.mutation.Where(ps...)
return ardo
}
// Exec executes the deletion query.
func (ardo *AuthRolesDeleteOne) Exec(ctx context.Context) error {
n, err := ardo.ard.Exec(ctx)
switch {
case err != nil:
return err
case n == 0:
return &NotFoundError{authroles.Label}
default:
return nil
}
}
// ExecX is like Exec, but panics if an error occurs.
func (ardo *AuthRolesDeleteOne) ExecX(ctx context.Context) {
if err := ardo.Exec(ctx); err != nil {
panic(err)
}
}
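
Deletion follows the same withHooks-backed pattern; a short sketch, again assuming a client.AuthRoles.Delete() entry point (hypothetical, not part of the diff):

func pruneUserRoles(ctx context.Context, client *ent.Client) (int, error) {
	// Remove every row still set to the default "user" role and return how many were deleted.
	return client.AuthRoles.
		Delete(). // assumed client entry point
		Where(authroles.RoleEQ(authroles.RoleUser)).
		Exec(ctx)
}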


@ -0,0 +1,614 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"math"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthRolesQuery is the builder for querying AuthRoles entities.
type AuthRolesQuery struct {
config
ctx *QueryContext
order []authroles.OrderOption
inters []Interceptor
predicates []predicate.AuthRoles
withToken *AuthTokensQuery
withFKs bool
// intermediate query (i.e. traversal path).
sql *sql.Selector
path func(context.Context) (*sql.Selector, error)
}
// Where adds a new predicate for the AuthRolesQuery builder.
func (arq *AuthRolesQuery) Where(ps ...predicate.AuthRoles) *AuthRolesQuery {
arq.predicates = append(arq.predicates, ps...)
return arq
}
// Limit the number of records to be returned by this query.
func (arq *AuthRolesQuery) Limit(limit int) *AuthRolesQuery {
arq.ctx.Limit = &limit
return arq
}
// Offset to start from.
func (arq *AuthRolesQuery) Offset(offset int) *AuthRolesQuery {
arq.ctx.Offset = &offset
return arq
}
// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (arq *AuthRolesQuery) Unique(unique bool) *AuthRolesQuery {
arq.ctx.Unique = &unique
return arq
}
// Order specifies how the records should be ordered.
func (arq *AuthRolesQuery) Order(o ...authroles.OrderOption) *AuthRolesQuery {
arq.order = append(arq.order, o...)
return arq
}
// QueryToken chains the current query on the "token" edge.
func (arq *AuthRolesQuery) QueryToken() *AuthTokensQuery {
query := (&AuthTokensClient{config: arq.config}).Query()
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
if err := arq.prepareQuery(ctx); err != nil {
return nil, err
}
selector := arq.sqlQuery(ctx)
if err := selector.Err(); err != nil {
return nil, err
}
step := sqlgraph.NewStep(
sqlgraph.From(authroles.Table, authroles.FieldID, selector),
sqlgraph.To(authtokens.Table, authtokens.FieldID),
sqlgraph.Edge(sqlgraph.O2O, true, authroles.TokenTable, authroles.TokenColumn),
)
fromU = sqlgraph.SetNeighbors(arq.driver.Dialect(), step)
return fromU, nil
}
return query
}
// First returns the first AuthRoles entity from the query.
// Returns a *NotFoundError when no AuthRoles was found.
func (arq *AuthRolesQuery) First(ctx context.Context) (*AuthRoles, error) {
nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, "First"))
if err != nil {
return nil, err
}
if len(nodes) == 0 {
return nil, &NotFoundError{authroles.Label}
}
return nodes[0], nil
}
// FirstX is like First, but panics if an error occurs.
func (arq *AuthRolesQuery) FirstX(ctx context.Context) *AuthRoles {
node, err := arq.First(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return node
}
// FirstID returns the first AuthRoles ID from the query.
// Returns a *NotFoundError when no AuthRoles ID was found.
func (arq *AuthRolesQuery) FirstID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
err = &NotFoundError{authroles.Label}
return
}
return ids[0], nil
}
// FirstIDX is like FirstID, but panics if an error occurs.
func (arq *AuthRolesQuery) FirstIDX(ctx context.Context) int {
id, err := arq.FirstID(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return id
}
// Only returns a single AuthRoles entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one AuthRoles entity is found.
// Returns a *NotFoundError when no AuthRoles entities are found.
func (arq *AuthRolesQuery) Only(ctx context.Context) (*AuthRoles, error) {
nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, "Only"))
if err != nil {
return nil, err
}
switch len(nodes) {
case 1:
return nodes[0], nil
case 0:
return nil, &NotFoundError{authroles.Label}
default:
return nil, &NotSingularError{authroles.Label}
}
}
// OnlyX is like Only, but panics if an error occurs.
func (arq *AuthRolesQuery) OnlyX(ctx context.Context) *AuthRoles {
node, err := arq.Only(ctx)
if err != nil {
panic(err)
}
return node
}
// OnlyID is like Only, but returns the only AuthRoles ID in the query.
// Returns a *NotSingularError when more than one AuthRoles ID is found.
// Returns a *NotFoundError when no entities are found.
func (arq *AuthRolesQuery) OnlyID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
case 1:
id = ids[0]
case 0:
err = &NotFoundError{authroles.Label}
default:
err = &NotSingularError{authroles.Label}
}
return
}
// OnlyIDX is like OnlyID, but panics if an error occurs.
func (arq *AuthRolesQuery) OnlyIDX(ctx context.Context) int {
id, err := arq.OnlyID(ctx)
if err != nil {
panic(err)
}
return id
}
// All executes the query and returns a list of AuthRolesSlice.
func (arq *AuthRolesQuery) All(ctx context.Context) ([]*AuthRoles, error) {
ctx = setContextOp(ctx, arq.ctx, "All")
if err := arq.prepareQuery(ctx); err != nil {
return nil, err
}
qr := querierAll[[]*AuthRoles, *AuthRolesQuery]()
return withInterceptors[[]*AuthRoles](ctx, arq, qr, arq.inters)
}
// AllX is like All, but panics if an error occurs.
func (arq *AuthRolesQuery) AllX(ctx context.Context) []*AuthRoles {
nodes, err := arq.All(ctx)
if err != nil {
panic(err)
}
return nodes
}
// IDs executes the query and returns a list of AuthRoles IDs.
func (arq *AuthRolesQuery) IDs(ctx context.Context) (ids []int, err error) {
if arq.ctx.Unique == nil && arq.path != nil {
arq.Unique(true)
}
ctx = setContextOp(ctx, arq.ctx, "IDs")
if err = arq.Select(authroles.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
}
// IDsX is like IDs, but panics if an error occurs.
func (arq *AuthRolesQuery) IDsX(ctx context.Context) []int {
ids, err := arq.IDs(ctx)
if err != nil {
panic(err)
}
return ids
}
// Count returns the count of the given query.
func (arq *AuthRolesQuery) Count(ctx context.Context) (int, error) {
ctx = setContextOp(ctx, arq.ctx, "Count")
if err := arq.prepareQuery(ctx); err != nil {
return 0, err
}
return withInterceptors[int](ctx, arq, querierCount[*AuthRolesQuery](), arq.inters)
}
// CountX is like Count, but panics if an error occurs.
func (arq *AuthRolesQuery) CountX(ctx context.Context) int {
count, err := arq.Count(ctx)
if err != nil {
panic(err)
}
return count
}
// Exist returns true if the query has elements in the graph.
func (arq *AuthRolesQuery) Exist(ctx context.Context) (bool, error) {
ctx = setContextOp(ctx, arq.ctx, "Exist")
switch _, err := arq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
case err != nil:
return false, fmt.Errorf("ent: check existence: %w", err)
default:
return true, nil
}
}
// ExistX is like Exist, but panics if an error occurs.
func (arq *AuthRolesQuery) ExistX(ctx context.Context) bool {
exist, err := arq.Exist(ctx)
if err != nil {
panic(err)
}
return exist
}
// Clone returns a duplicate of the AuthRolesQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (arq *AuthRolesQuery) Clone() *AuthRolesQuery {
if arq == nil {
return nil
}
return &AuthRolesQuery{
config: arq.config,
ctx: arq.ctx.Clone(),
order: append([]authroles.OrderOption{}, arq.order...),
inters: append([]Interceptor{}, arq.inters...),
predicates: append([]predicate.AuthRoles{}, arq.predicates...),
withToken: arq.withToken.Clone(),
// clone intermediate query.
sql: arq.sql.Clone(),
path: arq.path,
}
}
// WithToken tells the query-builder to eager-load the nodes that are connected to
// the "token" edge. The optional arguments are used to configure the query builder of the edge.
func (arq *AuthRolesQuery) WithToken(opts ...func(*AuthTokensQuery)) *AuthRolesQuery {
query := (&AuthTokensClient{config: arq.config}).Query()
for _, opt := range opts {
opt(query)
}
arq.withToken = query
return arq
}
// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
// var v []struct {
// Role authroles.Role `json:"role,omitempty"`
// Count int `json:"count,omitempty"`
// }
//
// client.AuthRoles.Query().
// GroupBy(authroles.FieldRole).
// Aggregate(ent.Count()).
// Scan(ctx, &v)
func (arq *AuthRolesQuery) GroupBy(field string, fields ...string) *AuthRolesGroupBy {
arq.ctx.Fields = append([]string{field}, fields...)
grbuild := &AuthRolesGroupBy{build: arq}
grbuild.flds = &arq.ctx.Fields
grbuild.label = authroles.Label
grbuild.scan = grbuild.Scan
return grbuild
}
// Select allows the selection of one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
// var v []struct {
// Role authroles.Role `json:"role,omitempty"`
// }
//
// client.AuthRoles.Query().
// Select(authroles.FieldRole).
// Scan(ctx, &v)
func (arq *AuthRolesQuery) Select(fields ...string) *AuthRolesSelect {
arq.ctx.Fields = append(arq.ctx.Fields, fields...)
sbuild := &AuthRolesSelect{AuthRolesQuery: arq}
sbuild.label = authroles.Label
sbuild.flds, sbuild.scan = &arq.ctx.Fields, sbuild.Scan
return sbuild
}
// Aggregate returns an AuthRolesSelect configured with the given aggregations.
func (arq *AuthRolesQuery) Aggregate(fns ...AggregateFunc) *AuthRolesSelect {
return arq.Select().Aggregate(fns...)
}
func (arq *AuthRolesQuery) prepareQuery(ctx context.Context) error {
for _, inter := range arq.inters {
if inter == nil {
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
}
if trv, ok := inter.(Traverser); ok {
if err := trv.Traverse(ctx, arq); err != nil {
return err
}
}
}
for _, f := range arq.ctx.Fields {
if !authroles.ValidColumn(f) {
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
}
if arq.path != nil {
prev, err := arq.path(ctx)
if err != nil {
return err
}
arq.sql = prev
}
return nil
}
func (arq *AuthRolesQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*AuthRoles, error) {
var (
nodes = []*AuthRoles{}
withFKs = arq.withFKs
_spec = arq.querySpec()
loadedTypes = [1]bool{
arq.withToken != nil,
}
)
if arq.withToken != nil {
withFKs = true
}
if withFKs {
_spec.Node.Columns = append(_spec.Node.Columns, authroles.ForeignKeys...)
}
_spec.ScanValues = func(columns []string) ([]any, error) {
return (*AuthRoles).scanValues(nil, columns)
}
_spec.Assign = func(columns []string, values []any) error {
node := &AuthRoles{config: arq.config}
nodes = append(nodes, node)
node.Edges.loadedTypes = loadedTypes
return node.assignValues(columns, values)
}
for i := range hooks {
hooks[i](ctx, _spec)
}
if err := sqlgraph.QueryNodes(ctx, arq.driver, _spec); err != nil {
return nil, err
}
if len(nodes) == 0 {
return nodes, nil
}
if query := arq.withToken; query != nil {
if err := arq.loadToken(ctx, query, nodes, nil,
func(n *AuthRoles, e *AuthTokens) { n.Edges.Token = e }); err != nil {
return nil, err
}
}
return nodes, nil
}
func (arq *AuthRolesQuery) loadToken(ctx context.Context, query *AuthTokensQuery, nodes []*AuthRoles, init func(*AuthRoles), assign func(*AuthRoles, *AuthTokens)) error {
ids := make([]uuid.UUID, 0, len(nodes))
nodeids := make(map[uuid.UUID][]*AuthRoles)
for i := range nodes {
if nodes[i].auth_tokens_roles == nil {
continue
}
fk := *nodes[i].auth_tokens_roles
if _, ok := nodeids[fk]; !ok {
ids = append(ids, fk)
}
nodeids[fk] = append(nodeids[fk], nodes[i])
}
if len(ids) == 0 {
return nil
}
query.Where(authtokens.IDIn(ids...))
neighbors, err := query.All(ctx)
if err != nil {
return err
}
for _, n := range neighbors {
nodes, ok := nodeids[n.ID]
if !ok {
return fmt.Errorf(`unexpected foreign-key "auth_tokens_roles" returned %v`, n.ID)
}
for i := range nodes {
assign(nodes[i], n)
}
}
return nil
}
func (arq *AuthRolesQuery) sqlCount(ctx context.Context) (int, error) {
_spec := arq.querySpec()
_spec.Node.Columns = arq.ctx.Fields
if len(arq.ctx.Fields) > 0 {
_spec.Unique = arq.ctx.Unique != nil && *arq.ctx.Unique
}
return sqlgraph.CountNodes(ctx, arq.driver, _spec)
}
func (arq *AuthRolesQuery) querySpec() *sqlgraph.QuerySpec {
_spec := sqlgraph.NewQuerySpec(authroles.Table, authroles.Columns, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
_spec.From = arq.sql
if unique := arq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if arq.path != nil {
_spec.Unique = true
}
if fields := arq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, authroles.FieldID)
for i := range fields {
if fields[i] != authroles.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
}
}
}
if ps := arq.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if limit := arq.ctx.Limit; limit != nil {
_spec.Limit = *limit
}
if offset := arq.ctx.Offset; offset != nil {
_spec.Offset = *offset
}
if ps := arq.order; len(ps) > 0 {
_spec.Order = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
return _spec
}
func (arq *AuthRolesQuery) sqlQuery(ctx context.Context) *sql.Selector {
builder := sql.Dialect(arq.driver.Dialect())
t1 := builder.Table(authroles.Table)
columns := arq.ctx.Fields
if len(columns) == 0 {
columns = authroles.Columns
}
selector := builder.Select(t1.Columns(columns...)...).From(t1)
if arq.sql != nil {
selector = arq.sql
selector.Select(selector.Columns(columns...)...)
}
if arq.ctx.Unique != nil && *arq.ctx.Unique {
selector.Distinct()
}
for _, p := range arq.predicates {
p(selector)
}
for _, p := range arq.order {
p(selector)
}
if offset := arq.ctx.Offset; offset != nil {
// limit is mandatory for the offset clause. We start with a
// default value and override it below if needed.
selector.Offset(*offset).Limit(math.MaxInt32)
}
if limit := arq.ctx.Limit; limit != nil {
selector.Limit(*limit)
}
return selector
}
// AuthRolesGroupBy is the group-by builder for AuthRoles entities.
type AuthRolesGroupBy struct {
selector
build *AuthRolesQuery
}
// Aggregate adds the given aggregation functions to the group-by query.
func (argb *AuthRolesGroupBy) Aggregate(fns ...AggregateFunc) *AuthRolesGroupBy {
argb.fns = append(argb.fns, fns...)
return argb
}
// Scan applies the selector query and scans the result into the given value.
func (argb *AuthRolesGroupBy) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, argb.build.ctx, "GroupBy")
if err := argb.build.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*AuthRolesQuery, *AuthRolesGroupBy](ctx, argb.build, argb, argb.build.inters, v)
}
func (argb *AuthRolesGroupBy) sqlScan(ctx context.Context, root *AuthRolesQuery, v any) error {
selector := root.sqlQuery(ctx).Select()
aggregation := make([]string, 0, len(argb.fns))
for _, fn := range argb.fns {
aggregation = append(aggregation, fn(selector))
}
if len(selector.SelectedColumns()) == 0 {
columns := make([]string, 0, len(*argb.flds)+len(argb.fns))
for _, f := range *argb.flds {
columns = append(columns, selector.C(f))
}
columns = append(columns, aggregation...)
selector.Select(columns...)
}
selector.GroupBy(selector.Columns(*argb.flds...)...)
if err := selector.Err(); err != nil {
return err
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := argb.build.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
// AuthRolesSelect is the builder for selecting fields of AuthRoles entities.
type AuthRolesSelect struct {
*AuthRolesQuery
selector
}
// Aggregate adds the given aggregation functions to the selector query.
func (ars *AuthRolesSelect) Aggregate(fns ...AggregateFunc) *AuthRolesSelect {
ars.fns = append(ars.fns, fns...)
return ars
}
// Scan applies the selector query and scans the result into the given value.
func (ars *AuthRolesSelect) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, ars.ctx, "Select")
if err := ars.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*AuthRolesQuery, *AuthRolesSelect](ctx, ars.AuthRolesQuery, ars, ars.inters, v)
}
func (ars *AuthRolesSelect) sqlScan(ctx context.Context, root *AuthRolesQuery, v any) error {
selector := root.sqlQuery(ctx)
aggregation := make([]string, 0, len(ars.fns))
for _, fn := range ars.fns {
aggregation = append(aggregation, fn(selector))
}
switch n := len(*ars.selector.flds); {
case n == 0 && len(aggregation) > 0:
selector.Select(aggregation...)
case n != 0 && len(aggregation) > 0:
selector.AppendSelect(aggregation...)
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := ars.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
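
The file above is the standard ent query builder for AuthRoles. Below is a minimal usage sketch, not part of the diff, assuming a constructed *ent.Client; authroles.RoleEQ is not shown in this diff and is assumed to be the usual generated predicate for the enum field.

package example // illustrative sketch only, not part of the generated code

import (
    "context"
    "fmt"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
)

// listRoles exercises the generated AuthRolesQuery: filter, eager-load the
// "token" edge, and run the GroupBy example from the generated doc comment.
func listRoles(ctx context.Context, client *ent.Client, role authroles.Role) error {
    roles, err := client.AuthRoles.Query().
        Where(authroles.RoleEQ(role)). // assumed generated predicate for the enum field
        WithToken().                   // eager-load the O2O "token" edge
        All(ctx)
    if err != nil {
        return err
    }
    fmt.Println("matched roles:", len(roles))

    // GroupBy/Aggregate exactly as shown in the generated doc comment above.
    var v []struct {
        Role  authroles.Role `json:"role,omitempty"`
        Count int            `json:"count,omitempty"`
    }
    return client.AuthRoles.Query().
        GroupBy(authroles.FieldRole).
        Aggregate(ent.Count()).
        Scan(ctx, &v)
}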


@ -0,0 +1,345 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthRolesUpdate is the builder for updating AuthRoles entities.
type AuthRolesUpdate struct {
config
hooks []Hook
mutation *AuthRolesMutation
}
// Where appends a list of predicates to the AuthRolesUpdate builder.
func (aru *AuthRolesUpdate) Where(ps ...predicate.AuthRoles) *AuthRolesUpdate {
aru.mutation.Where(ps...)
return aru
}
// SetRole sets the "role" field.
func (aru *AuthRolesUpdate) SetRole(a authroles.Role) *AuthRolesUpdate {
aru.mutation.SetRole(a)
return aru
}
// SetNillableRole sets the "role" field if the given value is not nil.
func (aru *AuthRolesUpdate) SetNillableRole(a *authroles.Role) *AuthRolesUpdate {
if a != nil {
aru.SetRole(*a)
}
return aru
}
// SetTokenID sets the "token" edge to the AuthTokens entity by ID.
func (aru *AuthRolesUpdate) SetTokenID(id uuid.UUID) *AuthRolesUpdate {
aru.mutation.SetTokenID(id)
return aru
}
// SetNillableTokenID sets the "token" edge to the AuthTokens entity by ID if the given value is not nil.
func (aru *AuthRolesUpdate) SetNillableTokenID(id *uuid.UUID) *AuthRolesUpdate {
if id != nil {
aru = aru.SetTokenID(*id)
}
return aru
}
// SetToken sets the "token" edge to the AuthTokens entity.
func (aru *AuthRolesUpdate) SetToken(a *AuthTokens) *AuthRolesUpdate {
return aru.SetTokenID(a.ID)
}
// Mutation returns the AuthRolesMutation object of the builder.
func (aru *AuthRolesUpdate) Mutation() *AuthRolesMutation {
return aru.mutation
}
// ClearToken clears the "token" edge to the AuthTokens entity.
func (aru *AuthRolesUpdate) ClearToken() *AuthRolesUpdate {
aru.mutation.ClearToken()
return aru
}
// Save executes the query and returns the number of nodes affected by the update operation.
func (aru *AuthRolesUpdate) Save(ctx context.Context) (int, error) {
return withHooks(ctx, aru.sqlSave, aru.mutation, aru.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (aru *AuthRolesUpdate) SaveX(ctx context.Context) int {
affected, err := aru.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (aru *AuthRolesUpdate) Exec(ctx context.Context) error {
_, err := aru.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (aru *AuthRolesUpdate) ExecX(ctx context.Context) {
if err := aru.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (aru *AuthRolesUpdate) check() error {
if v, ok := aru.mutation.Role(); ok {
if err := authroles.RoleValidator(v); err != nil {
return &ValidationError{Name: "role", err: fmt.Errorf(`ent: validator failed for field "AuthRoles.role": %w`, err)}
}
}
return nil
}
func (aru *AuthRolesUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := aru.check(); err != nil {
return n, err
}
_spec := sqlgraph.NewUpdateSpec(authroles.Table, authroles.Columns, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
if ps := aru.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := aru.mutation.Role(); ok {
_spec.SetField(authroles.FieldRole, field.TypeEnum, value)
}
if aru.mutation.TokenCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: authroles.TokenTable,
Columns: []string{authroles.TokenColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID),
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := aru.mutation.TokenIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: authroles.TokenTable,
Columns: []string{authroles.TokenColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if n, err = sqlgraph.UpdateNodes(ctx, aru.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{authroles.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return 0, err
}
aru.mutation.done = true
return n, nil
}
// AuthRolesUpdateOne is the builder for updating a single AuthRoles entity.
type AuthRolesUpdateOne struct {
config
fields []string
hooks []Hook
mutation *AuthRolesMutation
}
// SetRole sets the "role" field.
func (aruo *AuthRolesUpdateOne) SetRole(a authroles.Role) *AuthRolesUpdateOne {
aruo.mutation.SetRole(a)
return aruo
}
// SetNillableRole sets the "role" field if the given value is not nil.
func (aruo *AuthRolesUpdateOne) SetNillableRole(a *authroles.Role) *AuthRolesUpdateOne {
if a != nil {
aruo.SetRole(*a)
}
return aruo
}
// SetTokenID sets the "token" edge to the AuthTokens entity by ID.
func (aruo *AuthRolesUpdateOne) SetTokenID(id uuid.UUID) *AuthRolesUpdateOne {
aruo.mutation.SetTokenID(id)
return aruo
}
// SetNillableTokenID sets the "token" edge to the AuthTokens entity by ID if the given value is not nil.
func (aruo *AuthRolesUpdateOne) SetNillableTokenID(id *uuid.UUID) *AuthRolesUpdateOne {
if id != nil {
aruo = aruo.SetTokenID(*id)
}
return aruo
}
// SetToken sets the "token" edge to the AuthTokens entity.
func (aruo *AuthRolesUpdateOne) SetToken(a *AuthTokens) *AuthRolesUpdateOne {
return aruo.SetTokenID(a.ID)
}
// Mutation returns the AuthRolesMutation object of the builder.
func (aruo *AuthRolesUpdateOne) Mutation() *AuthRolesMutation {
return aruo.mutation
}
// ClearToken clears the "token" edge to the AuthTokens entity.
func (aruo *AuthRolesUpdateOne) ClearToken() *AuthRolesUpdateOne {
aruo.mutation.ClearToken()
return aruo
}
// Where appends a list of predicates to the AuthRolesUpdateOne builder.
func (aruo *AuthRolesUpdateOne) Where(ps ...predicate.AuthRoles) *AuthRolesUpdateOne {
aruo.mutation.Where(ps...)
return aruo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (aruo *AuthRolesUpdateOne) Select(field string, fields ...string) *AuthRolesUpdateOne {
aruo.fields = append([]string{field}, fields...)
return aruo
}
// Save executes the query and returns the updated AuthRoles entity.
func (aruo *AuthRolesUpdateOne) Save(ctx context.Context) (*AuthRoles, error) {
return withHooks(ctx, aruo.sqlSave, aruo.mutation, aruo.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (aruo *AuthRolesUpdateOne) SaveX(ctx context.Context) *AuthRoles {
node, err := aruo.Save(ctx)
if err != nil {
panic(err)
}
return node
}
// Exec executes the query on the entity.
func (aruo *AuthRolesUpdateOne) Exec(ctx context.Context) error {
_, err := aruo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (aruo *AuthRolesUpdateOne) ExecX(ctx context.Context) {
if err := aruo.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (aruo *AuthRolesUpdateOne) check() error {
if v, ok := aruo.mutation.Role(); ok {
if err := authroles.RoleValidator(v); err != nil {
return &ValidationError{Name: "role", err: fmt.Errorf(`ent: validator failed for field "AuthRoles.role": %w`, err)}
}
}
return nil
}
func (aruo *AuthRolesUpdateOne) sqlSave(ctx context.Context) (_node *AuthRoles, err error) {
if err := aruo.check(); err != nil {
return _node, err
}
_spec := sqlgraph.NewUpdateSpec(authroles.Table, authroles.Columns, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
id, ok := aruo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "AuthRoles.id" for update`)}
}
_spec.Node.ID.Value = id
if fields := aruo.fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, authroles.FieldID)
for _, f := range fields {
if !authroles.ValidColumn(f) {
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
if f != authroles.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, f)
}
}
}
if ps := aruo.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := aruo.mutation.Role(); ok {
_spec.SetField(authroles.FieldRole, field.TypeEnum, value)
}
if aruo.mutation.TokenCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: authroles.TokenTable,
Columns: []string{authroles.TokenColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID),
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := aruo.mutation.TokenIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: authroles.TokenTable,
Columns: []string{authroles.TokenColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
_node = &AuthRoles{config: aruo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues
if err = sqlgraph.UpdateNode(ctx, aruo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{authroles.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
aruo.mutation.done = true
return _node, nil
}
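
A short sketch of driving the update builders above from application code; it assumes a constructed *ent.Client and the standard generated edge predicate authroles.HasTokenWith, which is not part of this hunk.

package example // illustrative sketch only

import (
    "context"

    "github.com/google/uuid"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)

// setRoleForToken updates the AuthRoles row attached to a given token via the
// AuthRolesUpdate builder and returns the number of affected rows.
func setRoleForToken(ctx context.Context, client *ent.Client, tokenID uuid.UUID, role authroles.Role) (int, error) {
    return client.AuthRoles.Update().
        Where(authroles.HasTokenWith(authtokens.IDEQ(tokenID))). // assumed generated edge predicate
        SetRole(role). // validated by authroles.RoleValidator in check()
        Save(ctx)
}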


@ -7,8 +7,10 @@ import (
"strings"
"time"
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
@ -30,15 +32,18 @@ type AuthTokens struct {
// The values are being populated by the AuthTokensQuery when eager-loading is set.
Edges AuthTokensEdges `json:"edges"`
user_auth_tokens *uuid.UUID
selectValues sql.SelectValues
}
// AuthTokensEdges holds the relations/edges for other nodes in the graph.
type AuthTokensEdges struct {
// User holds the value of the user edge.
User *User `json:"user,omitempty"`
// Roles holds the value of the roles edge.
Roles *AuthRoles `json:"roles,omitempty"`
// loadedTypes holds the information for reporting if a
// type was loaded (or requested) in eager-loading or not.
loadedTypes [1]bool
loadedTypes [2]bool
}
// UserOrErr returns the User value or an error if the edge
@ -54,6 +59,19 @@ func (e AuthTokensEdges) UserOrErr() (*User, error) {
return nil, &NotLoadedError{edge: "user"}
}
// RolesOrErr returns the Roles value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) RolesOrErr() (*AuthRoles, error) {
if e.loadedTypes[1] {
if e.Roles == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: authroles.Label}
}
return e.Roles, nil
}
return nil, &NotLoadedError{edge: "roles"}
}
// scanValues returns the types for scanning values from sql.Rows.
func (*AuthTokens) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
@ -68,7 +86,7 @@ func (*AuthTokens) scanValues(columns []string) ([]any, error) {
case authtokens.ForeignKeys[0]: // user_auth_tokens
values[i] = &sql.NullScanner{S: new(uuid.UUID)}
default:
return nil, fmt.Errorf("unexpected column %q for type AuthTokens", columns[i])
values[i] = new(sql.UnknownType)
}
}
return values, nil
@ -119,21 +137,34 @@ func (at *AuthTokens) assignValues(columns []string, values []any) error {
at.user_auth_tokens = new(uuid.UUID)
*at.user_auth_tokens = *value.S.(*uuid.UUID)
}
default:
at.selectValues.Set(columns[i], values[i])
}
}
return nil
}
// Value returns the ent.Value that was dynamically selected and assigned to the AuthTokens.
// This includes values selected through modifiers, order, etc.
func (at *AuthTokens) Value(name string) (ent.Value, error) {
return at.selectValues.Get(name)
}
// QueryUser queries the "user" edge of the AuthTokens entity.
func (at *AuthTokens) QueryUser() *UserQuery {
return (&AuthTokensClient{config: at.config}).QueryUser(at)
return NewAuthTokensClient(at.config).QueryUser(at)
}
// QueryRoles queries the "roles" edge of the AuthTokens entity.
func (at *AuthTokens) QueryRoles() *AuthRolesQuery {
return NewAuthTokensClient(at.config).QueryRoles(at)
}
// Update returns a builder for updating this AuthTokens.
// Note that you need to call AuthTokens.Unwrap() before calling this method if this AuthTokens
// was returned from a transaction, and the transaction was committed or rolled back.
func (at *AuthTokens) Update() *AuthTokensUpdateOne {
return (&AuthTokensClient{config: at.config}).UpdateOne(at)
return NewAuthTokensClient(at.config).UpdateOne(at)
}
// Unwrap unwraps the AuthTokens entity that was returned from a transaction after it was closed,
@ -169,9 +200,3 @@ func (at *AuthTokens) String() string {
// AuthTokensSlice is a parsable slice of AuthTokens.
type AuthTokensSlice []*AuthTokens
func (at AuthTokensSlice) config(cfg config) {
for _i := range at {
at[_i].config = cfg
}
}
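
With the new "roles" edge on AuthTokens, a loaded token exposes its role either through the eager-loaded edge (RolesOrErr) or via the QueryRoles helper added above. A sketch; WithRoles and Only are the standard generated helpers and are assumed here.

package example // illustrative sketch only

import (
    "context"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
)

// roleOf returns the AuthRoles row attached to a token. If the "roles" edge
// was eager-loaded (e.g. via WithRoles on the query), RolesOrErr returns it
// directly; otherwise QueryRoles issues a follow-up query.
func roleOf(ctx context.Context, at *ent.AuthTokens) (*ent.AuthRoles, error) {
    if r, err := at.Edges.RolesOrErr(); err == nil {
        return r, nil
    }
    return at.QueryRoles().Only(ctx) // Only is the assumed generated single-result helper
}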


@ -5,6 +5,8 @@ package authtokens
import (
"time"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
)
@ -23,6 +25,8 @@ const (
FieldExpiresAt = "expires_at"
// EdgeUser holds the string denoting the user edge name in mutations.
EdgeUser = "user"
// EdgeRoles holds the string denoting the roles edge name in mutations.
EdgeRoles = "roles"
// Table holds the table name of the authtokens in the database.
Table = "auth_tokens"
// UserTable is the table that holds the user relation/edge.
@ -32,6 +36,13 @@ const (
UserInverseTable = "users"
// UserColumn is the table column denoting the user relation/edge.
UserColumn = "user_auth_tokens"
// RolesTable is the table that holds the roles relation/edge.
RolesTable = "auth_roles"
// RolesInverseTable is the table name for the AuthRoles entity.
// It exists in this package in order to avoid a circular dependency with the "authroles" package.
RolesInverseTable = "auth_roles"
// RolesColumn is the table column denoting the roles relation/edge.
RolesColumn = "auth_tokens_roles"
)
// Columns holds all SQL columns for authtokens fields.
@ -76,3 +87,54 @@ var (
// DefaultID holds the default value on creation for the "id" field.
DefaultID func() uuid.UUID
)
// OrderOption defines the ordering options for the AuthTokens queries.
type OrderOption func(*sql.Selector)
// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldID, opts...).ToFunc()
}
// ByCreatedAt orders the results by the created_at field.
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
}
// ByUpdatedAt orders the results by the updated_at field.
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
}
// ByExpiresAt orders the results by the expires_at field.
func ByExpiresAt(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldExpiresAt, opts...).ToFunc()
}
// ByUserField orders the results by user field.
func ByUserField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {
sqlgraph.OrderByNeighborTerms(s, newUserStep(), sql.OrderByField(field, opts...))
}
}
// ByRolesField orders the results by roles field.
func ByRolesField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {
sqlgraph.OrderByNeighborTerms(s, newRolesStep(), sql.OrderByField(field, opts...))
}
}
func newUserStep() *sqlgraph.Step {
return sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(UserInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
)
}
func newRolesStep() *sqlgraph.Step {
return sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(RolesInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.O2O, false, RolesTable, RolesColumn),
)
}
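
The new OrderOption helpers above plug straight into the query builder's Order method. A sketch, assuming AuthTokensQuery exposes the standard generated Order(...authtokens.OrderOption) signature.

package example // illustrative sketch only

import (
    "context"

    entsql "entgo.io/ent/dialect/sql"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)

// tokensByExpiry lists tokens newest-expiry first, then by the role value on
// the O2O "roles" edge, using the OrderOption helpers defined above.
func tokensByExpiry(ctx context.Context, client *ent.Client) ([]*ent.AuthTokens, error) {
    return client.AuthTokens.Query().
        Order(
            authtokens.ByExpiresAt(entsql.OrderDesc()),
            authtokens.ByRolesField(authroles.FieldRole),
        ).
        All(ctx)
}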


@ -13,357 +13,227 @@ import (
// ID filters vertices based on their ID field.
func ID(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldEQ(FieldID, id))
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldEQ(FieldID, id))
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldNEQ(FieldID, id))
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
v := make([]any, len(ids))
for i := range v {
v[i] = ids[i]
}
s.Where(sql.In(s.C(FieldID), v...))
})
return predicate.AuthTokens(sql.FieldIn(FieldID, ids...))
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
v := make([]any, len(ids))
for i := range v {
v[i] = ids[i]
}
s.Where(sql.NotIn(s.C(FieldID), v...))
})
return predicate.AuthTokens(sql.FieldNotIn(FieldID, ids...))
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldGT(FieldID, id))
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldGTE(FieldID, id))
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldLT(FieldID, id))
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id uuid.UUID) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldID), id))
})
return predicate.AuthTokens(sql.FieldLTE(FieldID, id))
}
// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldCreatedAt, v))
}
// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldUpdatedAt, v))
}
// Token applies equality check predicate on the "token" field. It's identical to TokenEQ.
func Token(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldToken, v))
}
// ExpiresAt applies equality check predicate on the "expires_at" field. It's identical to ExpiresAtEQ.
func ExpiresAt(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldExpiresAt, v))
}
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldCreatedAt, v))
}
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldNEQ(FieldCreatedAt, v))
}
// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldCreatedAt), v...))
})
return predicate.AuthTokens(sql.FieldIn(FieldCreatedAt, vs...))
}
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
})
return predicate.AuthTokens(sql.FieldNotIn(FieldCreatedAt, vs...))
}
// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldGT(FieldCreatedAt, v))
}
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldGTE(FieldCreatedAt, v))
}
// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldLT(FieldCreatedAt, v))
}
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldCreatedAt), v))
})
return predicate.AuthTokens(sql.FieldLTE(FieldCreatedAt, v))
}
// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldUpdatedAt, v))
}
// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldNEQ(FieldUpdatedAt, v))
}
// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldUpdatedAt), v...))
})
return predicate.AuthTokens(sql.FieldIn(FieldUpdatedAt, vs...))
}
// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
})
return predicate.AuthTokens(sql.FieldNotIn(FieldUpdatedAt, vs...))
}
// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldGT(FieldUpdatedAt, v))
}
// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldGTE(FieldUpdatedAt, v))
}
// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldLT(FieldUpdatedAt, v))
}
// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
})
return predicate.AuthTokens(sql.FieldLTE(FieldUpdatedAt, v))
}
// TokenEQ applies the EQ predicate on the "token" field.
func TokenEQ(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldToken, v))
}
// TokenNEQ applies the NEQ predicate on the "token" field.
func TokenNEQ(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldNEQ(FieldToken, v))
}
// TokenIn applies the In predicate on the "token" field.
func TokenIn(vs ...[]byte) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldToken), v...))
})
return predicate.AuthTokens(sql.FieldIn(FieldToken, vs...))
}
// TokenNotIn applies the NotIn predicate on the "token" field.
func TokenNotIn(vs ...[]byte) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldToken), v...))
})
return predicate.AuthTokens(sql.FieldNotIn(FieldToken, vs...))
}
// TokenGT applies the GT predicate on the "token" field.
func TokenGT(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldGT(FieldToken, v))
}
// TokenGTE applies the GTE predicate on the "token" field.
func TokenGTE(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldGTE(FieldToken, v))
}
// TokenLT applies the LT predicate on the "token" field.
func TokenLT(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldLT(FieldToken, v))
}
// TokenLTE applies the LTE predicate on the "token" field.
func TokenLTE(v []byte) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldToken), v))
})
return predicate.AuthTokens(sql.FieldLTE(FieldToken, v))
}
// ExpiresAtEQ applies the EQ predicate on the "expires_at" field.
func ExpiresAtEQ(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldEQ(FieldExpiresAt, v))
}
// ExpiresAtNEQ applies the NEQ predicate on the "expires_at" field.
func ExpiresAtNEQ(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldNEQ(FieldExpiresAt, v))
}
// ExpiresAtIn applies the In predicate on the "expires_at" field.
func ExpiresAtIn(vs ...time.Time) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.In(s.C(FieldExpiresAt), v...))
})
return predicate.AuthTokens(sql.FieldIn(FieldExpiresAt, vs...))
}
// ExpiresAtNotIn applies the NotIn predicate on the "expires_at" field.
func ExpiresAtNotIn(vs ...time.Time) predicate.AuthTokens {
v := make([]any, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.NotIn(s.C(FieldExpiresAt), v...))
})
return predicate.AuthTokens(sql.FieldNotIn(FieldExpiresAt, vs...))
}
// ExpiresAtGT applies the GT predicate on the "expires_at" field.
func ExpiresAtGT(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldGT(FieldExpiresAt, v))
}
// ExpiresAtGTE applies the GTE predicate on the "expires_at" field.
func ExpiresAtGTE(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldGTE(FieldExpiresAt, v))
}
// ExpiresAtLT applies the LT predicate on the "expires_at" field.
func ExpiresAtLT(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldLT(FieldExpiresAt, v))
}
// ExpiresAtLTE applies the LTE predicate on the "expires_at" field.
func ExpiresAtLTE(v time.Time) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldExpiresAt), v))
})
return predicate.AuthTokens(sql.FieldLTE(FieldExpiresAt, v))
}
// HasUser applies the HasEdge predicate on the "user" edge.
@ -371,7 +241,6 @@ func HasUser() predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(UserTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
)
sqlgraph.HasNeighbors(s, step)
@ -380,12 +249,31 @@ func HasUser() predicate.AuthTokens {
// HasUserWith applies the HasEdge predicate on the "user" edge with the given conditions (other predicates).
func HasUserWith(preds ...predicate.User) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
step := newUserStep()
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
}
})
})
}
// HasRoles applies the HasEdge predicate on the "roles" edge.
func HasRoles() predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(UserInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
sqlgraph.Edge(sqlgraph.O2O, false, RolesTable, RolesColumn),
)
sqlgraph.HasNeighbors(s, step)
})
}
// HasRolesWith applies the HasEdge predicate on the "roles" edge with the given conditions (other predicates).
func HasRolesWith(preds ...predicate.AuthRoles) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
step := newRolesStep()
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
@ -396,32 +284,15 @@ func HasUserWith(preds ...predicate.User) predicate.AuthTokens {
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.AuthTokens(sql.AndPredicates(predicates...))
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.AuthTokens(sql.OrPredicates(predicates...))
}
// Not applies the not operator on the given predicate.
func Not(p predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(func(s *sql.Selector) {
p(s.Not())
})
return predicate.AuthTokens(sql.NotPredicates(p))
}
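
The rewritten predicates above compose exactly as before; only their implementation moved to the sql.Field*/sql.*Predicates helpers. A short sketch combining the field and edge predicates, assuming authroles.RoleEQ exists as the usual generated helper.

package example // illustrative sketch only

import (
    "context"
    "time"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)

// expiredTokensWithRole finds tokens that are past their expiry and whose
// attached role matches the given value, using the predicates defined above.
func expiredTokensWithRole(ctx context.Context, client *ent.Client, cutoff time.Time, role authroles.Role) ([]*ent.AuthTokens, error) {
    return client.AuthTokens.Query().
        Where(authtokens.And(
            authtokens.ExpiresAtLT(cutoff),
            authtokens.HasRolesWith(authroles.RoleEQ(role)), // assumed generated enum predicate
        )).
        All(ctx)
}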


@ -11,6 +11,7 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
@ -103,6 +104,25 @@ func (atc *AuthTokensCreate) SetUser(u *User) *AuthTokensCreate {
return atc.SetUserID(u.ID)
}
// SetRolesID sets the "roles" edge to the AuthRoles entity by ID.
func (atc *AuthTokensCreate) SetRolesID(id int) *AuthTokensCreate {
atc.mutation.SetRolesID(id)
return atc
}
// SetNillableRolesID sets the "roles" edge to the AuthRoles entity by ID if the given value is not nil.
func (atc *AuthTokensCreate) SetNillableRolesID(id *int) *AuthTokensCreate {
if id != nil {
atc = atc.SetRolesID(*id)
}
return atc
}
// SetRoles sets the "roles" edge to the AuthRoles entity.
func (atc *AuthTokensCreate) SetRoles(a *AuthRoles) *AuthTokensCreate {
return atc.SetRolesID(a.ID)
}
// Mutation returns the AuthTokensMutation object of the builder.
func (atc *AuthTokensCreate) Mutation() *AuthTokensMutation {
return atc.mutation
@ -110,50 +130,8 @@ func (atc *AuthTokensCreate) Mutation() *AuthTokensMutation {
// Save creates the AuthTokens in the database.
func (atc *AuthTokensCreate) Save(ctx context.Context) (*AuthTokens, error) {
var (
err error
node *AuthTokens
)
atc.defaults()
if len(atc.hooks) == 0 {
if err = atc.check(); err != nil {
return nil, err
}
node, err = atc.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AuthTokensMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err = atc.check(); err != nil {
return nil, err
}
atc.mutation = mutation
if node, err = atc.sqlSave(ctx); err != nil {
return nil, err
}
mutation.id = &node.ID
mutation.done = true
return node, err
})
for i := len(atc.hooks) - 1; i >= 0; i-- {
if atc.hooks[i] == nil {
return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
}
mut = atc.hooks[i](mut)
}
v, err := mut.Mutate(ctx, atc.mutation)
if err != nil {
return nil, err
}
nv, ok := v.(*AuthTokens)
if !ok {
return nil, fmt.Errorf("unexpected node type %T returned from AuthTokensMutation", v)
}
node = nv
}
return node, err
return withHooks(ctx, atc.sqlSave, atc.mutation, atc.hooks)
}
// SaveX calls Save and panics if Save returns an error.
@ -216,6 +194,9 @@ func (atc *AuthTokensCreate) check() error {
}
func (atc *AuthTokensCreate) sqlSave(ctx context.Context) (*AuthTokens, error) {
if err := atc.check(); err != nil {
return nil, err
}
_node, _spec := atc.createSpec()
if err := sqlgraph.CreateNode(ctx, atc.driver, _spec); err != nil {
if sqlgraph.IsConstraintError(err) {
@ -230,54 +211,34 @@ func (atc *AuthTokensCreate) sqlSave(ctx context.Context) (*AuthTokens, error) {
return nil, err
}
}
atc.mutation.id = &_node.ID
atc.mutation.done = true
return _node, nil
}
func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
var (
_node = &AuthTokens{config: atc.config}
_spec = &sqlgraph.CreateSpec{
Table: authtokens.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(authtokens.Table, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
)
if id, ok := atc.mutation.ID(); ok {
_node.ID = id
_spec.ID.Value = &id
}
if value, ok := atc.mutation.CreatedAt(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: authtokens.FieldCreatedAt,
})
_spec.SetField(authtokens.FieldCreatedAt, field.TypeTime, value)
_node.CreatedAt = value
}
if value, ok := atc.mutation.UpdatedAt(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: authtokens.FieldUpdatedAt,
})
_spec.SetField(authtokens.FieldUpdatedAt, field.TypeTime, value)
_node.UpdatedAt = value
}
if value, ok := atc.mutation.Token(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeBytes,
Value: value,
Column: authtokens.FieldToken,
})
_spec.SetField(authtokens.FieldToken, field.TypeBytes, value)
_node.Token = value
}
if value, ok := atc.mutation.ExpiresAt(); ok {
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: authtokens.FieldExpiresAt,
})
_spec.SetField(authtokens.FieldExpiresAt, field.TypeTime, value)
_node.ExpiresAt = value
}
if nodes := atc.mutation.UserIDs(); len(nodes) > 0 {
@ -288,10 +249,7 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
Columns: []string{authtokens.UserColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: user.FieldID,
},
IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeUUID),
},
}
for _, k := range nodes {
@ -300,17 +258,37 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
_node.user_auth_tokens = &nodes[0]
_spec.Edges = append(_spec.Edges, edge)
}
if nodes := atc.mutation.RolesIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: false,
Table: authtokens.RolesTable,
Columns: []string{authtokens.RolesColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt),
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges = append(_spec.Edges, edge)
}
return _node, _spec
}
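
createSpec above now appends an O2O edge spec for "roles", so a role can be attached at creation time through SetRoles/SetRolesID. A sketch of the two-step create, assuming an AuthRolesCreate builder that mirrors the other generated create builders.

package example // illustrative sketch only

import (
    "context"
    "time"

    "github.com/google/uuid"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
)

// newTokenWithRole creates an AuthRoles row and an AuthTokens row and links
// them through the new "roles" edge.
func newTokenWithRole(ctx context.Context, client *ent.Client, userID uuid.UUID, raw []byte, role authroles.Role) (*ent.AuthTokens, error) {
    r, err := client.AuthRoles.Create(). // assumed to mirror the generated create builders
        SetRole(role).
        Save(ctx)
    if err != nil {
        return nil, err
    }
    return client.AuthTokens.Create().
        SetUserID(userID).
        SetToken(raw).
        SetExpiresAt(time.Now().Add(24 * time.Hour)).
        SetRoles(r). // the new O2O edge added in this change
        Save(ctx)
}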
// AuthTokensCreateBulk is the builder for creating many AuthTokens entities in bulk.
type AuthTokensCreateBulk struct {
config
err error
builders []*AuthTokensCreate
}
// Save creates the AuthTokens entities in the database.
func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, error) {
if atcb.err != nil {
return nil, atcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(atcb.builders))
nodes := make([]*AuthTokens, len(atcb.builders))
mutators := make([]Mutator, len(atcb.builders))
@ -327,8 +305,8 @@ func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, erro
return nil, err
}
builder.mutation = mutation
nodes[i], specs[i] = builder.createSpec()
var err error
nodes[i], specs[i] = builder.createSpec()
if i < len(mutators)-1 {
_, err = mutators[i+1].Mutate(root, atcb.builders[i+1].mutation)
} else {


@ -4,7 +4,6 @@ package ent
import (
"context"
"fmt"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
@ -28,34 +27,7 @@ func (atd *AuthTokensDelete) Where(ps ...predicate.AuthTokens) *AuthTokensDelete
// Exec executes the deletion query and returns how many vertices were deleted.
func (atd *AuthTokensDelete) Exec(ctx context.Context) (int, error) {
var (
err error
affected int
)
if len(atd.hooks) == 0 {
affected, err = atd.sqlExec(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*AuthTokensMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
atd.mutation = mutation
affected, err = atd.sqlExec(ctx)
mutation.done = true
return affected, err
})
for i := len(atd.hooks) - 1; i >= 0; i-- {
if atd.hooks[i] == nil {
return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
}
mut = atd.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, atd.mutation); err != nil {
return 0, err
}
}
return affected, err
return withHooks(ctx, atd.sqlExec, atd.mutation, atd.hooks)
}
// ExecX is like Exec, but panics if an error occurs.
@ -68,15 +40,7 @@ func (atd *AuthTokensDelete) ExecX(ctx context.Context) int {
}
func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: authtokens.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(authtokens.Table, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
if ps := atd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@ -88,6 +52,7 @@ func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) {
if err != nil && sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
atd.mutation.done = true
return affected, err
}
@ -96,6 +61,12 @@ type AuthTokensDeleteOne struct {
atd *AuthTokensDelete
}
// Where appends a list of predicates to the AuthTokensDelete builder.
func (atdo *AuthTokensDeleteOne) Where(ps ...predicate.AuthTokens) *AuthTokensDeleteOne {
atdo.atd.mutation.Where(ps...)
return atdo
}
// Exec executes the deletion query.
func (atdo *AuthTokensDeleteOne) Exec(ctx context.Context) error {
n, err := atdo.atd.Exec(ctx)
@ -111,5 +82,7 @@ func (atdo *AuthTokensDeleteOne) Exec(ctx context.Context) error {
// ExecX is like Exec, but panics if an error occurs.
func (atdo *AuthTokensDeleteOne) ExecX(ctx context.Context) {
atdo.atd.ExecX(ctx)
if err := atdo.Exec(ctx); err != nil {
panic(err)
}
}
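
The delete builder now routes through withHooks like the other builders. A small pruning sketch, assuming the usual generated client accessor.

package example // illustrative sketch only

import (
    "context"
    "time"

    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)

// pruneExpired deletes every token whose expiry is in the past and reports
// how many rows were removed.
func pruneExpired(ctx context.Context, client *ent.Client) (int, error) {
    return client.AuthTokens.Delete().
        Where(authtokens.ExpiresAtLT(time.Now())).
        Exec(ctx)
}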

Some files were not shown because too many files have changed in this diff.