Mirror of https://github.com/hay-kot/homebox.git, synced 2024-11-23 17:15:42 +00:00
feat: items-editor (#5)
* format readme
* update logo
* format html
* add logo to docs
* repository for document and document tokens
* add attachments type and repository
* autogenerate types via scripts
* use autogenerated types
* attachment type updates
* add insured and quantity fields for items
* implement HasID interface for entities
* implement label updates for items
* implement service update method
* WIP item update client side actions
* check err on attachment
* finish types for basic items editor
* remove unused var
* house keeping
parent fbc364dcd2
commit 95ab14b866

125 changed files with 15626 additions and 1791 deletions
.github/dependabot.yml (vendored, new file, 31 lines)

@@ -0,0 +1,31 @@

```yaml
version: 2
updates:
  # Fetch and update latest `npm` packages
  - package-ecosystem: npm
    directory: "/frontend"
    schedule:
      interval: daily
      time: "00:00"
    open-pull-requests-limit: 10
    reviewers:
      - hay-kot
    assignees:
      - hay-kot
    commit-message:
      prefix: fix
      prefix-development: chore
      include: scope
  - package-ecosystem: gomod
    directory: backend
    schedule:
      interval: daily
      time: "00:00"
    open-pull-requests-limit: 10
    reviewers:
      - hay-kot
    assignees:
      - hay-kot
    commit-message:
      prefix: fix
      prefix-development: chore
      include: scope
```
.github/pull_request_template.md (vendored, new file, 72 lines)

@@ -0,0 +1,72 @@

````markdown
<!--
This template provides some ideas of things to include in your PR description.
To start, try providing a short summary of your changes in the Title above.
If a section of the PR template does not apply to this PR, then delete that section.
-->

## What type of PR is this?

_(REQUIRED)_

<!--
Delete any of the following that do not apply:
-->

- bug
- cleanup
- documentation
- feature

## What this PR does / why we need it:

_(REQUIRED)_

<!--
What goal is this change working towards?
Provide a bullet pointed summary of how each file was changed.
Briefly explain any decisions you made with respect to the changes.
Include anything here that you didn't include in *Release Notes*
above, such as changes to CI or changes to internal methods.
-->

## Which issue(s) this PR fixes:

_(REQUIRED)_

<!--
If this PR fixes one or more issues, list them here.
One line each, like so:

Fixes #123
Fixes #39
-->

## Special notes for your reviewer:

_(fill-in or delete this section)_

<!--
Is there any particular feedback you would / wouldn't like?
Which parts of the code should reviewers focus on?
-->

## Testing

_(fill-in or delete this section)_

<!--
Describe how you tested this change.
-->

## Release Notes

_(REQUIRED)_

<!--
If this PR makes user facing changes, please describe them here. This
description will be copied into the release notes/changelog, whenever the
next version is released. Keep this section short, and focus on high level
changes.

Put your text between the block. To omit notes, use NONE within the block.
-->

```release-note
```
````
.gitignore (vendored, 12 lines changed)

```diff
@@ -2,7 +2,6 @@
 config.yml
 homebox.db
 .idea
-.vscode
 
 .DS_Store
 test-mailer.json
@@ -35,4 +34,13 @@ backend/.env
 
 # Output Directory for Nuxt/Frontend during build step
 backend/app/api/public/*
 !backend/app/api/public/.gitkeep
+
+node_modules
+*.log*
+.nuxt
+.nitro
+.cache
+.output
+.env
+dist
```
.vscode/settings.json (vendored, new file, 14 lines)

@@ -0,0 +1,14 @@

```json
{
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": true
  },
  "yaml.schemas": {
    "https://squidfunk.github.io/mkdocs-material/schema.json": "mkdocs.yml"
  },
  "explorer.fileNesting.enabled": true,
  "explorer.fileNesting.patterns": {
    "package.json": "package-lock.json, yarn.lock, .eslintrc.js, tsconfig.json, .prettierrc, .editorconfig, pnpm-lock.yaml, postcss.config.js, tailwind.config.js",
    "docker-compose.yml": "Dockerfile, .dockerignore, docker-compose.dev.yml, docker-compose.yml",
    "README.md": "LICENSE, SECURITY.md"
  }
}
```
LICENSE (new file, 661 lines)

@@ -0,0 +1,661 @@

```text
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.

The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.

A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.

The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.

An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.

The precise terms and conditions for copying, distribution and
modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU Affero General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based
on the Program.

To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.

A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

The Corresponding Source for a work in source code form is that
same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

  a) The work must carry prominent notices stating that you modified
  it, and giving a relevant date.

  b) The work must carry prominent notices stating that it is
  released under this License and any conditions added under section
  7. This requirement modifies the requirement in section 4 to
  "keep intact all notices".

  c) You must license the entire work, as a whole, under this
  License to anyone who comes into possession of a copy. This
  License will therefore apply, along with any applicable section 7
  additional terms, to the whole of the work, and all its parts,
  regardless of how they are packaged. This License gives no
  permission to license the work in any other way, but it does not
  invalidate such permission if you have separately received it.

  d) If the work has interactive user interfaces, each must display
  Appropriate Legal Notices; however, if the Program has interactive
  interfaces that do not display Appropriate Legal Notices, your
  work need not make them do so.

A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

  a) Convey the object code in, or embodied in, a physical product
  (including a physical distribution medium), accompanied by the
  Corresponding Source fixed on a durable physical medium
  customarily used for software interchange.

  b) Convey the object code in, or embodied in, a physical product
  (including a physical distribution medium), accompanied by a
  written offer, valid for at least three years and valid for as
  long as you offer spare parts or customer support for that product
  model, to give anyone who possesses the object code either (1) a
  copy of the Corresponding Source for all the software in the
  product that is covered by this License, on a durable physical
  medium customarily used for software interchange, for a price no
  more than your reasonable cost of physically performing this
  conveying of source, or (2) access to copy the
  Corresponding Source from a network server at no charge.

  c) Convey individual copies of the object code with a copy of the
  written offer to provide the Corresponding Source. This
  alternative is allowed only occasionally and noncommercially, and
  only if you received the object code with such an offer, in accord
  with subsection 6b.

  d) Convey the object code by offering access from a designated
  place (gratis or for a charge), and offer equivalent access to the
  Corresponding Source in the same way through the same place at no
  further charge. You need not require recipients to copy the
  Corresponding Source along with the object code. If the place to
  copy the object code is a network server, the Corresponding Source
  may be on a different server (operated by you or a third party)
  that supports equivalent copying facilities, provided you maintain
  clear directions next to the object code saying where to find the
  Corresponding Source. Regardless of what server hosts the
  Corresponding Source, you remain obligated to ensure that it is
  available for as long as needed to satisfy these requirements.

  e) Convey the object code using peer-to-peer transmission, provided
  you inform other peers where the object code and Corresponding
  Source of the work are being offered to the general public at no
  charge under subsection 6d.

A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.

Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:

  a) Disclaiming warranty or limiting liability differently from the
  terms of sections 15 and 16 of this License; or

  b) Requiring preservation of specified reasonable legal notices or
  author attributions in that material or in the Appropriate Legal
  Notices displayed by works containing it; or

  c) Prohibiting misrepresentation of the origin of that material, or
  requiring that modified versions of such material be marked in
  reasonable ways as different from the original version; or

  d) Limiting the use for publicity purposes of names of licensors or
  authors of the material; or

  e) Declining to grant rights under trademark law for use of some
  trade names, trademarks, or service marks; or

  f) Requiring indemnification of licensors and authors of that
  material by anyone who conveys the material (or modified versions of
  it) with contractual assumptions of liability to the recipient, for
  any liability that these contractual assumptions directly impose on
  those licensors and authors.

All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.

8. Termination.

You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).

However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.

Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.

9. Acceptance Not Required for Having Copies.

You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.

10. Automatic Licensing of Downstream Recipients.

Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.

An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.

You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.

11. Patents.

A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".

A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.

Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.

In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.

If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.

If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.

A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.

Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.

12. No Surrender of Others' Freedom.

If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

13. Remote Network Interaction; Use with the GNU General Public License.

Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.

Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.

14. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.

If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.

15. Disclaimer of Warranty.

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

16. Limitation of Liability.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

17. Interpretation of Sections 15 and 16.

If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with this program.  If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.

You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
```
README.md (47 lines changed)

```diff
@@ -1,14 +1,25 @@
-<h1 align="center"> HomeBox </h1>
-<p align="center" style="width: 100%">
-  <a href="https://hay-kot.github.io/homebox/">Docs</a>
-  |
-  <a href="https://homebox.fly.dev">Demo</a>
-  |
-  <a href="https://discord.gg/tuncmNrE4z">Discord</a>
-</p>
+<div align="center">
+  <svg width="200" align="center" viewBox="0 0 10817 9730" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:5.42683">
+    <path d="M9310.16 2560.9c245.302 249.894 419.711 539.916 565.373 845.231 47.039 98.872 36.229 215.514-28.2 304.05-64.391 88.536-172.099 134.676-280.631 120.28 0 .053-.039.053-.039.053" style="fill:gray;stroke:#000;stroke-width:206.41px"/>
+    <path d="M5401.56 487.044c-127.958 6.227-254.855 40.77-370.992 103.628-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43s-249.048 416.428-470.593 786.926c-189.24 316.445-592.833 429.831-919.198 258.219l-2699.36-1419.32v2215.59c0 226.273 128.751 435.33 337.755 548.466 764.649 413.885 2620.97 1418.66 3385.59 1832.51 209.018 113.137 466.496 113.137 675.514 0 764.623-413.857 2620.94-1418.63 3385.59-1832.51 208.989-113.136 337.743-322.193 337.743-548.466v-3513.48c0-318.684-174.59-611.722-454.853-763.409-795.543-430.632-2427.75-1314.09-3193.02-1728.32-141.693-76.684-299.364-111.227-455.442-103.628" style="fill:#dadada;stroke:#000;stroke-width:206.42px"/>
+    <path d="M5471.83 4754.46V504.71c-127.958 6.226-325.127 23.1-441.264 85.958-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43Z" style="fill:gray;stroke:#000;stroke-width:206.42px"/>
+    <path d="m1459.34 2725.96-373.791 715.667c-177.166 339.292-46.417 758 292.375 936.167l4.75 2.5m0 0 2699.37 1419.29c326.374 171.625 729.916 58.25 919.165-258.208 221.542-370.5 470.583-786.917 470.583-786.917l-3963.04-2122.42-2.167 3.458-47.25 90.458" style="fill:#dadada;stroke:#000;stroke-width:206.42px"/>
+    <path d="M5443.74 520.879v4149.79" style="fill:none;stroke:#000;stroke-width:153.5px"/>
+    <path d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59Z" style="fill:#567f67"/>
+    <path d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59ZM6463.98 5551.29v1387.06l2301.77-1328.92V4222.37L6463.98 5551.29Z"/>
+    <path d="M5443.76 9041.74v-4278.4" style="fill:none;stroke:#000;stroke-width:206.44px;stroke-linejoin:miter"/>
+    <path d="m5471.79 4773.86 3829.35-2188.22" style="fill:none;stroke:#000;stroke-width:206.43px;stroke-linejoin:miter"/>
+  </svg>
+</div>
+<h1 align="center" style="margin-top: -10px"> HomeBox </h1>
+<p align="center" style="width: 100;">
+  <a href="https://hay-kot.github.io/homebox/">Docs</a>
+  |
+  <a href="https://homebox.fly.dev">Demo</a>
+  |
+  <a href="https://discord.gg/tuncmNrE4z">Discord</a>
+</p>
 
 ## MVP Todo
 
 - [x] Locations
@@ -24,8 +35,9 @@
   - [ ] Update
   - [x] Delete
 - [ ] Asset Attachments for Items
-- [ ] Fields To Add
-  - [ ] Quantity
+- [x] Fields To Add
+  - [x] Quantity
+  - [x] Insured (bool)
 - [ ] Bulk Import via CSV
   - [x] Initial
   - [ ] Add Warranty Columns
@@ -41,16 +53,18 @@
 - [ ] Db Migrations
   - [ ] How To
 - [ ] Repo House Keeping
+  - [x] Add License
   - [ ] Issues Template
-  - [ ] PR Templates
+  - [x] PR Templates
   - [ ] Contributors Guide
-  - [ ] Security Policy
+  - [x] Security Policy
   - [ ] Feature Request Template
 - [ ] Embedded Version Info
   - [ ] Version Number
   - [ ] Git Has
 - [ ] Setup Docker Volumes in Dockerfile
-## All Todos
+
+## All Todo's
+
 - [ ] User Invitation Links to Join Group
 - [ ] Maintenance Logs
@@ -71,4 +85,7 @@
 - [x] Warranty Information
   - [x] Option for Lifetime Warranty or Warranty Period
 - [ ] Expose Swagger API Documentation
 - [ ] Dynamic Port / Host Settings
+
+## Credits
+- Logo by [@lakotelman](https://github.com/lakotelman)
```
SECURITY.md (new file, 9 lines)

@@ -0,0 +1,9 @@

```markdown
# Security Policy

## Supported Versions

Since this software is still considered beta/WIP support is always only given for the latest version.

## Reporting a Vulnerability

Please open a normal public issue if you have any security related concerns.
```
Taskfile.yml (43 lines changed)

```diff
@@ -1,20 +1,27 @@
 version: "3"
 
 tasks:
+  generate:
+    cmds:
+      - |
+        cd backend && ent generate ./ent/schema \
+          --template=ent/schema/templates/has_id.tmpl
+      - cd backend/app/api/ && swag fmt
+      - cd backend/app/api/ && swag init --dir=./,../../internal,../../pkgs
+      - |
+        npx swagger-typescript-api \
+          --no-client \
+          --clean-output \
+          --modular \
+          --path ./backend/app/api/docs/swagger.json \
+          --output ./frontend/lib/api/types
+
+        python3 ./scripts/process-types.py ./frontend/lib/api/types/data-contracts.ts
+
   api:
     cmds:
-      - cd backend/app/api/ && swag fmt
-      - cd backend/app/api/ && swag init --dir=./,../../internal,../../pkgs,../../ent
-      # - |
-      #   npx swagger-typescript-api \
-      #   --path ./backend/app/api/docs/swagger.json \
-      #   --output ./client/auto-client \
-      #   --module-name-first-tag \
-      #   --modular
+      - task: generate
       - cd backend && go run ./app/api/ {{.CLI_ARGS}}
     silent: false
-    sources:
-      - ./backend/**/*.go
 
   api:build:
     cmds:
@@ -26,6 +33,10 @@ tasks:
       - cd backend && go test ./app/api/
     silent: true
 
+  api:watch:
+    cmds:
+      - cd backend && gotestsum --watch ./...
+
   api:coverage:
     cmds:
       - cd backend && go test -race -coverprofile=coverage.out -covermode=atomic ./app/... ./internal/... ./pkgs/... -v -cover
@@ -39,12 +50,12 @@ tasks:
       - cd frontend && pnpm run test:ci
     silent: true
 
-  docker:build:
+  frontend:watch:
+    desc: Starts the vitest test runner in watch mode
     cmds:
-      - cd backend && docker-compose up --build
-    silent: true
+      - cd frontend && pnpm vitest --watch
 
-  generate:types:
+  frontend:
+    desc: Run frontend development server
     cmds:
-      - cd backend && go run ./app/generator
-    silent: true
+      - cd frontend && pnpm dev
```
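The new `generate` task is what the commit message's "autogenerate types via scripts" bullet refers to: ent regenerates the Go entities (applying `has_id.tmpl`, presumably the template behind the new `HasID` interface), swag rebuilds the Swagger spec, `swagger-typescript-api` emits TypeScript contracts into `frontend/lib/api/types`, and `process-types.py` post-processes the generated `data-contracts.ts`. As a rough, hedged sketch of the kind of contracts this pipeline might emit: only the `insured` and `quantity` fields are confirmed by this commit, and every other field name here is an illustrative assumption.

```typescript
// Hypothetical excerpt of the generated frontend/lib/api/types/data-contracts.ts.
// Only `insured` and `quantity` are confirmed by this commit ("add insured and
// quantity fields for items"); the remaining fields are assumptions.
export interface ItemSummary {
  id: string;
  name: string;
  description: string;
  insured: boolean; // new in this commit
  quantity: number; // new in this commit
}

export interface ItemUpdate {
  name: string;
  description: string;
  insured: boolean;
  quantity: number;
  labelIds: string[]; // assumption, based on "implement label updates for items"
}
```

Because `--no-client` is passed, only model types like these are generated (no API class), so client code imports the contracts directly. The hunks that follow are from the regenerated Swagger document (`const docTemplate`), where the items list now references `types.ItemSummary` instead of `types.ItemOut`.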
```diff
@@ -49,7 +49,7 @@ const docTemplate = `{
                 "items": {
                     "type": "array",
                     "items": {
-                        "$ref": "#/definitions/types.ItemOut"
+                        "$ref": "#/definitions/types.ItemSummary"
                     }
                 }
             }
@@ -175,6 +175,15 @@ const docTemplate = `{
                         "name": "id",
                         "in": "path",
                         "required": true
+                    },
+                    {
+                        "description": "Item Data",
+                        "name": "payload",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/types.ItemUpdate"
+                        }
                     }
                 ],
                 "responses": {
```
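The second hunk registers a required `payload` body parameter of type `types.ItemUpdate` on the item-by-id route, which pairs with the "WIP item update client side actions" bullet. A minimal sketch of how a client action built on the autogenerated types might call this endpoint, assuming a `/api/v1` prefix, a `PUT` verb, an import path, and an `ItemSummary` response type, none of which are confirmed by the diff:

```typescript
// Hypothetical client-side update action using the generated contracts.
// The import path, route shape, verb, and response type are assumptions;
// the diff only confirms the ItemUpdate body parameter.
import type { ItemSummary, ItemUpdate } from "./lib/api/types/data-contracts";

export async function updateItem(id: string, payload: ItemUpdate): Promise<ItemSummary> {
  const resp = await fetch(`/api/v1/items/${id}`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
  if (!resp.ok) {
    throw new Error(`item update failed with status ${resp.status}`);
  }
  return (await resp.json()) as ItemSummary;
}
```

Typing the request body as the generated `ItemUpdate` means a backend schema change only has to flow through the `generate` task to surface as a frontend compile error.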
```diff
@@ -691,7 +700,7 @@ const docTemplate = `{
                 "type": "object",
                 "properties": {
                     "item": {
-                        "$ref": "#/definitions/ent.User"
+                        "$ref": "#/definitions/types.UserOut"
                     }
                 }
             }
```
```diff
@@ -788,422 +797,6 @@ const docTemplate = `{
             }
         }
     },
     "definitions": {
-        "ent.AuthTokens": {
-            "type": "object",
-            "properties": {
-                "created_at": {
-                    "description": "CreatedAt holds the value of the \"created_at\" field.",
-                    "type": "string"
-                },
-                "edges": {
-                    "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthTokensQuery when eager-loading is set.",
-                    "$ref": "#/definitions/ent.AuthTokensEdges"
-                },
-                "expires_at": {
-                    "description": "ExpiresAt holds the value of the \"expires_at\" field.",
-                    "type": "string"
-                },
-                "id": {
-                    "description": "ID of the ent.",
-                    "type": "string"
-                },
-                "token": {
-                    "description": "Token holds the value of the \"token\" field.",
-                    "type": "array",
-                    "items": {
-                        "type": "integer"
-                    }
-                },
-                "updated_at": {
-                    "description": "UpdatedAt holds the value of the \"updated_at\" field.",
-                    "type": "string"
-                }
-            }
-        },
-        "ent.AuthTokensEdges": {
-            "type": "object",
-            "properties": {
-                "user": {
-                    "description": "User holds the value of the user edge.",
-                    "$ref": "#/definitions/ent.User"
-                }
-            }
-        },
-        "ent.Group": {
-            "type": "object",
-            "properties": {
-                "created_at": {
-                    "description": "CreatedAt holds the value of the \"created_at\" field.",
-                    "type": "string"
-                },
-                "currency": {
-                    "description": "Currency holds the value of the \"currency\" field.",
-                    "type": "string"
-                },
-                "edges": {
-                    "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupQuery when eager-loading is set.",
-                    "$ref": "#/definitions/ent.GroupEdges"
-                },
-                "id": {
-                    "description": "ID of the ent.",
-                    "type": "string"
-                },
-                "name": {
-                    "description": "Name holds the value of the \"name\" field.",
-                    "type": "string"
-                },
-                "updated_at": {
-                    "description": "UpdatedAt holds the value of the \"updated_at\" field.",
-                    "type": "string"
-                }
-            }
-        },
-        "ent.GroupEdges": {
-            "type": "object",
-            "properties": {
-                "items": {
-                    "description": "Items holds the value of the items edge.",
-                    "type": "array",
-                    "items": {
-                        "$ref": "#/definitions/ent.Item"
-                    }
-                },
-                "labels": {
-                    "description": "Labels holds the value of the labels edge.",
-                    "type": "array",
-                    "items": {
-                        "$ref": "#/definitions/ent.Label"
-                    }
-                },
-                "locations": {
-                    "description": "Locations holds the value of the locations edge.",
-                    "type": "array",
-                    "items": {
-                        "$ref": "#/definitions/ent.Location"
-                    }
-                },
-                "users": {
-                    "description": "Users holds the value of the users edge.",
-                    "type": "array",
-                    "items": {
-                        "$ref": "#/definitions/ent.User"
-                    }
-                }
-            }
-        },
-        "ent.Item": {
-            "type": "object",
-            "properties": {
-                "created_at": {
-                    "description": "CreatedAt holds the value of the \"created_at\" field.",
-                    "type": "string"
-                },
-                "description": {
-                    "description": "Description holds the value of the \"description\" field.",
-                    "type": "string"
```
|
|
||||||
},
|
|
||||||
"edges": {
|
|
||||||
"description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemQuery when eager-loading is set.",
|
|
||||||
"$ref": "#/definitions/ent.ItemEdges"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"description": "ID of the ent.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"lifetime_warranty": {
|
|
||||||
"description": "LifetimeWarranty holds the value of the \"lifetime_warranty\" field.",
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
"manufacturer": {
|
|
||||||
"description": "Manufacturer holds the value of the \"manufacturer\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"model_number": {
|
|
||||||
"description": "ModelNumber holds the value of the \"model_number\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"description": "Name holds the value of the \"name\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"notes": {
|
|
||||||
"description": "Notes holds the value of the \"notes\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"purchase_from": {
|
|
||||||
"description": "PurchaseFrom holds the value of the \"purchase_from\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"purchase_price": {
|
|
||||||
"description": "PurchasePrice holds the value of the \"purchase_price\" field.",
|
|
||||||
"type": "number"
|
|
||||||
},
|
|
||||||
"purchase_time": {
|
|
||||||
"description": "PurchaseTime holds the value of the \"purchase_time\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"serial_number": {
|
|
||||||
"description": "SerialNumber holds the value of the \"serial_number\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"sold_notes": {
|
|
||||||
"description": "SoldNotes holds the value of the \"sold_notes\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"sold_price": {
|
|
||||||
"description": "SoldPrice holds the value of the \"sold_price\" field.",
|
|
||||||
"type": "number"
|
|
||||||
},
|
|
||||||
"sold_time": {
|
|
||||||
"description": "SoldTime holds the value of the \"sold_time\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"sold_to": {
|
|
||||||
"description": "SoldTo holds the value of the \"sold_to\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"description": "UpdatedAt holds the value of the \"updated_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"warranty_details": {
|
|
||||||
"description": "WarrantyDetails holds the value of the \"warranty_details\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"warranty_expires": {
|
|
||||||
"description": "WarrantyExpires holds the value of the \"warranty_expires\" field.",
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.ItemEdges": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"fields": {
|
|
||||||
"description": "Fields holds the value of the fields edge.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"$ref": "#/definitions/ent.ItemField"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"group": {
|
|
||||||
"description": "Group holds the value of the group edge.",
|
|
||||||
"$ref": "#/definitions/ent.Group"
|
|
||||||
},
|
|
||||||
"label": {
|
|
||||||
"description": "Label holds the value of the label edge.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"$ref": "#/definitions/ent.Label"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"location": {
|
|
||||||
"description": "Location holds the value of the location edge.",
|
|
||||||
"$ref": "#/definitions/ent.Location"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.ItemField": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"boolean_value": {
|
|
||||||
"description": "BooleanValue holds the value of the \"boolean_value\" field.",
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
"created_at": {
|
|
||||||
"description": "CreatedAt holds the value of the \"created_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"description": "Description holds the value of the \"description\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"edges": {
|
|
||||||
"description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemFieldQuery when eager-loading is set.",
|
|
||||||
"$ref": "#/definitions/ent.ItemFieldEdges"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"description": "ID of the ent.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"description": "Name holds the value of the \"name\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"number_value": {
|
|
||||||
"description": "NumberValue holds the value of the \"number_value\" field.",
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
"text_value": {
|
|
||||||
"description": "TextValue holds the value of the \"text_value\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"time_value": {
|
|
||||||
"description": "TimeValue holds the value of the \"time_value\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"type": {
|
|
||||||
"description": "Type holds the value of the \"type\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"description": "UpdatedAt holds the value of the \"updated_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.ItemFieldEdges": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"item": {
|
|
||||||
"description": "Item holds the value of the item edge.",
|
|
||||||
"$ref": "#/definitions/ent.Item"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.Label": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"color": {
|
|
||||||
"description": "Color holds the value of the \"color\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"created_at": {
|
|
||||||
"description": "CreatedAt holds the value of the \"created_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"description": "Description holds the value of the \"description\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"edges": {
|
|
||||||
"description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LabelQuery when eager-loading is set.",
|
|
||||||
"$ref": "#/definitions/ent.LabelEdges"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"description": "ID of the ent.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"description": "Name holds the value of the \"name\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"description": "UpdatedAt holds the value of the \"updated_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.LabelEdges": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"group": {
|
|
||||||
"description": "Group holds the value of the group edge.",
|
|
||||||
"$ref": "#/definitions/ent.Group"
|
|
||||||
},
|
|
||||||
"items": {
|
|
||||||
"description": "Items holds the value of the items edge.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"$ref": "#/definitions/ent.Item"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.Location": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"created_at": {
|
|
||||||
"description": "CreatedAt holds the value of the \"created_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"description": "Description holds the value of the \"description\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"edges": {
|
|
||||||
"description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LocationQuery when eager-loading is set.",
|
|
||||||
"$ref": "#/definitions/ent.LocationEdges"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"description": "ID of the ent.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"description": "Name holds the value of the \"name\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"description": "UpdatedAt holds the value of the \"updated_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.LocationEdges": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"group": {
|
|
||||||
"description": "Group holds the value of the group edge.",
|
|
||||||
"$ref": "#/definitions/ent.Group"
|
|
||||||
},
|
|
||||||
"items": {
|
|
||||||
"description": "Items holds the value of the items edge.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"$ref": "#/definitions/ent.Item"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.User": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"created_at": {
|
|
||||||
"description": "CreatedAt holds the value of the \"created_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"edges": {
|
|
||||||
"description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the UserQuery when eager-loading is set.",
|
|
||||||
"$ref": "#/definitions/ent.UserEdges"
|
|
||||||
},
|
|
||||||
"email": {
|
|
||||||
"description": "Email holds the value of the \"email\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"description": "ID of the ent.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"is_superuser": {
|
|
||||||
"description": "IsSuperuser holds the value of the \"is_superuser\" field.",
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"description": "Name holds the value of the \"name\" field.",
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"description": "UpdatedAt holds the value of the \"updated_at\" field.",
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ent.UserEdges": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"auth_tokens": {
|
|
||||||
"description": "AuthTokens holds the value of the auth_tokens edge.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"$ref": "#/definitions/ent.AuthTokens"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"group": {
|
|
||||||
"description": "Group holds the value of the group edge.",
|
|
||||||
"$ref": "#/definitions/ent.Group"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"server.Result": {
|
"server.Result": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
@ -1245,6 +838,37 @@ const docTemplate = `{
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"types.DocumentOut": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"path": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"title": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"types.ItemAttachment": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"createdAt": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"document": {
|
||||||
|
"$ref": "#/definitions/types.DocumentOut"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"updatedAt": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"types.ItemCreate": {
|
"types.ItemCreate": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
@ -1269,6 +893,12 @@ const docTemplate = `{
|
||||||
"types.ItemOut": {
|
"types.ItemOut": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
"attachments": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/types.ItemAttachment"
|
||||||
|
}
|
||||||
|
},
|
||||||
"createdAt": {
|
"createdAt": {
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
@ -1278,6 +908,9 @@ const docTemplate = `{
|
||||||
"id": {
|
"id": {
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"insured": {
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
"labels": {
|
"labels": {
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"items": {
|
"items": {
|
||||||
|
@ -1309,12 +942,16 @@ const docTemplate = `{
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"purchasePrice": {
|
"purchasePrice": {
|
||||||
"type": "number"
|
"type": "string",
|
||||||
|
"example": "0"
|
||||||
},
|
},
|
||||||
"purchaseTime": {
|
"purchaseTime": {
|
||||||
"description": "Purchase",
|
"description": "Purchase",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"quantity": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
"serialNumber": {
|
"serialNumber": {
|
||||||
"description": "Identifications",
|
"description": "Identifications",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
@ -1323,7 +960,8 @@ const docTemplate = `{
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"soldPrice": {
|
"soldPrice": {
|
||||||
"type": "number"
|
"type": "string",
|
||||||
|
"example": "0"
|
||||||
},
|
},
|
||||||
"soldTime": {
|
"soldTime": {
|
||||||
"description": "Sold",
|
"description": "Sold",
|
||||||
|
@ -1355,6 +993,9 @@ const docTemplate = `{
|
||||||
"id": {
|
"id": {
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"insured": {
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
"labels": {
|
"labels": {
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"items": {
|
"items": {
|
||||||
|
@ -1386,12 +1027,16 @@ const docTemplate = `{
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"purchasePrice": {
|
"purchasePrice": {
|
||||||
"type": "number"
|
"type": "string",
|
||||||
|
"example": "0"
|
||||||
},
|
},
|
||||||
"purchaseTime": {
|
"purchaseTime": {
|
||||||
"description": "Purchase",
|
"description": "Purchase",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"quantity": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
"serialNumber": {
|
"serialNumber": {
|
||||||
"description": "Identifications",
|
"description": "Identifications",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
@ -1400,7 +1045,8 @@ const docTemplate = `{
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"soldPrice": {
|
"soldPrice": {
|
||||||
"type": "number"
|
"type": "string",
|
||||||
|
"example": "0"
|
||||||
},
|
},
|
||||||
"soldTime": {
|
"soldTime": {
|
||||||
"description": "Sold",
|
"description": "Sold",
|
||||||
|
@ -1420,6 +1066,85 @@ const docTemplate = `{
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"types.ItemUpdate": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"description": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"insured": {
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
|
"labelIds": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"lifetimeWarranty": {
|
||||||
|
"description": "Warranty",
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
|
"locationId": {
|
||||||
|
"description": "Edges",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"manufacturer": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"modelNumber": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"notes": {
|
||||||
|
"description": "Extras",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"purchaseFrom": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"purchasePrice": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "0"
|
||||||
|
},
|
||||||
|
"purchaseTime": {
|
||||||
|
"description": "Purchase",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"quantity": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"serialNumber": {
|
||||||
|
"description": "Identifications",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"soldNotes": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"soldPrice": {
|
||||||
|
"type": "string",
|
||||||
|
"example": "0"
|
||||||
|
},
|
||||||
|
"soldTime": {
|
||||||
|
"description": "Sold",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"soldTo": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"warrantyDetails": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"warrantyExpires": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"types.LabelCreate": {
|
"types.LabelCreate": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
@ -1591,6 +1316,29 @@ const docTemplate = `{
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"types.UserOut": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"email": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"groupId": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"groupName": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"isSuperuser": {
|
||||||
|
"type": "boolean"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"types.UserRegistration": {
|
"types.UserRegistration": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
|
|
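Since the `generate` task shown earlier pipes the regenerated swagger output through swagger-typescript-api into ./frontend/lib/api/types/data-contracts.ts, the new `types.ItemUpdate` definition above should reach the frontend roughly as the interface below. This is a minimal sketch: the field names and types are taken from the swagger definition in this diff, while the interface name, optionality, and comments are assumptions about the generator's output.

// Hypothetical shape of the generated types.ItemUpdate contract
// (field names and types mirror the swagger definition above).
export interface ItemUpdate {
  description?: string;
  id?: string;
  insured?: boolean;
  labelIds?: string[];
  /** Warranty */
  lifetimeWarranty?: boolean;
  /** Edges */
  locationId?: string;
  manufacturer?: string;
  modelNumber?: string;
  name?: string;
  /** Extras */
  notes?: string;
  purchaseFrom?: string;
  /** @example "0" */
  purchasePrice?: string;
  /** Purchase */
  purchaseTime?: string;
  quantity?: number;
  /** Identifications */
  serialNumber?: string;
  soldNotes?: string;
  /** @example "0" */
  soldPrice?: string;
  /** Sold */
  soldTime?: string;
  soldTo?: string;
  warrantyDetails?: string;
  warrantyExpires?: string;
}

Note that purchasePrice and soldPrice would travel as strings here, matching the schema change from number to string (example "0") in the hunks above.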
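Likewise, the added `types.UserOut` definition, which replaces the raw `ent.User` in the user response above, would surface from the same generator as approximately the following (same assumptions as the previous sketch):

// Hypothetical generated contract for types.UserOut; fields mirror
// the swagger definition above, the rest is assumed.
export interface UserOut {
  email?: string;
  groupId?: string;
  groupName?: string;
  id?: string;
  isSuperuser?: boolean;
  name?: string;
}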
@ -1,318 +1,5 @@
|
||||||
basePath: /api
|
basePath: /api
|
||||||
definitions:
|
definitions:
|
||||||
ent.AuthTokens:
|
|
||||||
properties:
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.AuthTokensEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the AuthTokensQuery when eager-loading is set.
|
|
||||||
expires_at:
|
|
||||||
description: ExpiresAt holds the value of the "expires_at" field.
|
|
||||||
type: string
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
token:
|
|
||||||
description: Token holds the value of the "token" field.
|
|
||||||
items:
|
|
||||||
type: integer
|
|
||||||
type: array
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.AuthTokensEdges:
|
|
||||||
properties:
|
|
||||||
user:
|
|
||||||
$ref: '#/definitions/ent.User'
|
|
||||||
description: User holds the value of the user edge.
|
|
||||||
type: object
|
|
||||||
ent.Group:
|
|
||||||
properties:
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
currency:
|
|
||||||
description: Currency holds the value of the "currency" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.GroupEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the GroupQuery when eager-loading is set.
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
description: Name holds the value of the "name" field.
|
|
||||||
type: string
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.GroupEdges:
|
|
||||||
properties:
|
|
||||||
items:
|
|
||||||
description: Items holds the value of the items edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.Item'
|
|
||||||
type: array
|
|
||||||
labels:
|
|
||||||
description: Labels holds the value of the labels edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.Label'
|
|
||||||
type: array
|
|
||||||
locations:
|
|
||||||
description: Locations holds the value of the locations edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.Location'
|
|
||||||
type: array
|
|
||||||
users:
|
|
||||||
description: Users holds the value of the users edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.User'
|
|
||||||
type: array
|
|
||||||
type: object
|
|
||||||
ent.Item:
|
|
||||||
properties:
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
description:
|
|
||||||
description: Description holds the value of the "description" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.ItemEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the ItemQuery when eager-loading is set.
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
lifetime_warranty:
|
|
||||||
description: LifetimeWarranty holds the value of the "lifetime_warranty" field.
|
|
||||||
type: boolean
|
|
||||||
manufacturer:
|
|
||||||
description: Manufacturer holds the value of the "manufacturer" field.
|
|
||||||
type: string
|
|
||||||
model_number:
|
|
||||||
description: ModelNumber holds the value of the "model_number" field.
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
description: Name holds the value of the "name" field.
|
|
||||||
type: string
|
|
||||||
notes:
|
|
||||||
description: Notes holds the value of the "notes" field.
|
|
||||||
type: string
|
|
||||||
purchase_from:
|
|
||||||
description: PurchaseFrom holds the value of the "purchase_from" field.
|
|
||||||
type: string
|
|
||||||
purchase_price:
|
|
||||||
description: PurchasePrice holds the value of the "purchase_price" field.
|
|
||||||
type: number
|
|
||||||
purchase_time:
|
|
||||||
description: PurchaseTime holds the value of the "purchase_time" field.
|
|
||||||
type: string
|
|
||||||
serial_number:
|
|
||||||
description: SerialNumber holds the value of the "serial_number" field.
|
|
||||||
type: string
|
|
||||||
sold_notes:
|
|
||||||
description: SoldNotes holds the value of the "sold_notes" field.
|
|
||||||
type: string
|
|
||||||
sold_price:
|
|
||||||
description: SoldPrice holds the value of the "sold_price" field.
|
|
||||||
type: number
|
|
||||||
sold_time:
|
|
||||||
description: SoldTime holds the value of the "sold_time" field.
|
|
||||||
type: string
|
|
||||||
sold_to:
|
|
||||||
description: SoldTo holds the value of the "sold_to" field.
|
|
||||||
type: string
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
warranty_details:
|
|
||||||
description: WarrantyDetails holds the value of the "warranty_details" field.
|
|
||||||
type: string
|
|
||||||
warranty_expires:
|
|
||||||
description: WarrantyExpires holds the value of the "warranty_expires" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.ItemEdges:
|
|
||||||
properties:
|
|
||||||
fields:
|
|
||||||
description: Fields holds the value of the fields edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.ItemField'
|
|
||||||
type: array
|
|
||||||
group:
|
|
||||||
$ref: '#/definitions/ent.Group'
|
|
||||||
description: Group holds the value of the group edge.
|
|
||||||
label:
|
|
||||||
description: Label holds the value of the label edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.Label'
|
|
||||||
type: array
|
|
||||||
location:
|
|
||||||
$ref: '#/definitions/ent.Location'
|
|
||||||
description: Location holds the value of the location edge.
|
|
||||||
type: object
|
|
||||||
ent.ItemField:
|
|
||||||
properties:
|
|
||||||
boolean_value:
|
|
||||||
description: BooleanValue holds the value of the "boolean_value" field.
|
|
||||||
type: boolean
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
description:
|
|
||||||
description: Description holds the value of the "description" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.ItemFieldEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the ItemFieldQuery when eager-loading is set.
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
description: Name holds the value of the "name" field.
|
|
||||||
type: string
|
|
||||||
number_value:
|
|
||||||
description: NumberValue holds the value of the "number_value" field.
|
|
||||||
type: integer
|
|
||||||
text_value:
|
|
||||||
description: TextValue holds the value of the "text_value" field.
|
|
||||||
type: string
|
|
||||||
time_value:
|
|
||||||
description: TimeValue holds the value of the "time_value" field.
|
|
||||||
type: string
|
|
||||||
type:
|
|
||||||
description: Type holds the value of the "type" field.
|
|
||||||
type: string
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.ItemFieldEdges:
|
|
||||||
properties:
|
|
||||||
item:
|
|
||||||
$ref: '#/definitions/ent.Item'
|
|
||||||
description: Item holds the value of the item edge.
|
|
||||||
type: object
|
|
||||||
ent.Label:
|
|
||||||
properties:
|
|
||||||
color:
|
|
||||||
description: Color holds the value of the "color" field.
|
|
||||||
type: string
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
description:
|
|
||||||
description: Description holds the value of the "description" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.LabelEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the LabelQuery when eager-loading is set.
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
description: Name holds the value of the "name" field.
|
|
||||||
type: string
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.LabelEdges:
|
|
||||||
properties:
|
|
||||||
group:
|
|
||||||
$ref: '#/definitions/ent.Group'
|
|
||||||
description: Group holds the value of the group edge.
|
|
||||||
items:
|
|
||||||
description: Items holds the value of the items edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.Item'
|
|
||||||
type: array
|
|
||||||
type: object
|
|
||||||
ent.Location:
|
|
||||||
properties:
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
description:
|
|
||||||
description: Description holds the value of the "description" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.LocationEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the LocationQuery when eager-loading is set.
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
description: Name holds the value of the "name" field.
|
|
||||||
type: string
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.LocationEdges:
|
|
||||||
properties:
|
|
||||||
group:
|
|
||||||
$ref: '#/definitions/ent.Group'
|
|
||||||
description: Group holds the value of the group edge.
|
|
||||||
items:
|
|
||||||
description: Items holds the value of the items edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.Item'
|
|
||||||
type: array
|
|
||||||
type: object
|
|
||||||
ent.User:
|
|
||||||
properties:
|
|
||||||
created_at:
|
|
||||||
description: CreatedAt holds the value of the "created_at" field.
|
|
||||||
type: string
|
|
||||||
edges:
|
|
||||||
$ref: '#/definitions/ent.UserEdges'
|
|
||||||
description: |-
|
|
||||||
Edges holds the relations/edges for other nodes in the graph.
|
|
||||||
The values are being populated by the UserQuery when eager-loading is set.
|
|
||||||
email:
|
|
||||||
description: Email holds the value of the "email" field.
|
|
||||||
type: string
|
|
||||||
id:
|
|
||||||
description: ID of the ent.
|
|
||||||
type: string
|
|
||||||
is_superuser:
|
|
||||||
description: IsSuperuser holds the value of the "is_superuser" field.
|
|
||||||
type: boolean
|
|
||||||
name:
|
|
||||||
description: Name holds the value of the "name" field.
|
|
||||||
type: string
|
|
||||||
updated_at:
|
|
||||||
description: UpdatedAt holds the value of the "updated_at" field.
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
ent.UserEdges:
|
|
||||||
properties:
|
|
||||||
auth_tokens:
|
|
||||||
description: AuthTokens holds the value of the auth_tokens edge.
|
|
||||||
items:
|
|
||||||
$ref: '#/definitions/ent.AuthTokens'
|
|
||||||
type: array
|
|
||||||
group:
|
|
||||||
$ref: '#/definitions/ent.Group'
|
|
||||||
description: Group holds the value of the group edge.
|
|
||||||
type: object
|
|
||||||
server.Result:
|
server.Result:
|
||||||
properties:
|
properties:
|
||||||
details: {}
|
details: {}
|
||||||
@@ -340,6 +27,26 @@ definitions:
         type: string
       type: array
     type: object
+  types.DocumentOut:
+    properties:
+      id:
+        type: string
+      path:
+        type: string
+      title:
+        type: string
+    type: object
+  types.ItemAttachment:
+    properties:
+      createdAt:
+        type: string
+      document:
+        $ref: '#/definitions/types.DocumentOut'
+      id:
+        type: string
+      updatedAt:
+        type: string
+    type: object
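The two definitions above are new in this commit: every item attachment now carries its backing document. As a rough client-side sketch (struct names and string timestamps are assumptions; the backend's own types package is not shown in this hunk), the JSON shapes map onto Go structs like this:

// Sketch only: Go structs matching the swagger shapes above.
// The real backend types may use time.Time for the timestamp fields.
type DocumentOut struct {
	ID    string `json:"id"`
	Path  string `json:"path"`
	Title string `json:"title"`
}

type ItemAttachment struct {
	ID        string      `json:"id"`
	CreatedAt string      `json:"createdAt"`
	UpdatedAt string      `json:"updatedAt"`
	Document  DocumentOut `json:"document"` // the attachment's backing document
}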
   types.ItemCreate:
     properties:
       description:
@@ -356,12 +63,18 @@ definitions:
     type: object
   types.ItemOut:
     properties:
+      attachments:
+        items:
+          $ref: '#/definitions/types.ItemAttachment'
+        type: array
       createdAt:
         type: string
       description:
         type: string
       id:
         type: string
+      insured:
+        type: boolean
       labels:
         items:
           $ref: '#/definitions/types.LabelSummary'
@@ -384,17 +97,21 @@ definitions:
       purchaseFrom:
         type: string
       purchasePrice:
-        type: number
+        example: "0"
+        type: string
       purchaseTime:
         description: Purchase
         type: string
+      quantity:
+        type: integer
       serialNumber:
         description: Identifications
         type: string
       soldNotes:
         type: string
       soldPrice:
-        type: number
+        example: "0"
+        type: string
       soldTime:
         description: Sold
         type: string
@@ -415,6 +132,8 @@ definitions:
         type: string
       id:
         type: string
+      insured:
+        type: boolean
       labels:
         items:
           $ref: '#/definitions/types.LabelSummary'
@@ -437,17 +156,21 @@ definitions:
       purchaseFrom:
         type: string
       purchasePrice:
-        type: number
+        example: "0"
+        type: string
       purchaseTime:
         description: Purchase
         type: string
+      quantity:
+        type: integer
       serialNumber:
         description: Identifications
         type: string
       soldNotes:
         type: string
       soldPrice:
-        type: number
+        example: "0"
+        type: string
       soldTime:
         description: Sold
         type: string
@@ -460,6 +183,61 @@ definitions:
       warrantyExpires:
         type: string
     type: object
+  types.ItemUpdate:
+    properties:
+      description:
+        type: string
+      id:
+        type: string
+      insured:
+        type: boolean
+      labelIds:
+        items:
+          type: string
+        type: array
+      lifetimeWarranty:
+        description: Warranty
+        type: boolean
+      locationId:
+        description: Edges
+        type: string
+      manufacturer:
+        type: string
+      modelNumber:
+        type: string
+      name:
+        type: string
+      notes:
+        description: Extras
+        type: string
+      purchaseFrom:
+        type: string
+      purchasePrice:
+        example: "0"
+        type: string
+      purchaseTime:
+        description: Purchase
+        type: string
+      quantity:
+        type: integer
+      serialNumber:
+        description: Identifications
+        type: string
+      soldNotes:
+        type: string
+      soldPrice:
+        example: "0"
+        type: string
+      soldTime:
+        description: Sold
+        type: string
+      soldTo:
+        type: string
+      warrantyDetails:
+        type: string
+      warrantyExpires:
+        type: string
+    type: object
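types.ItemUpdate is the new request body for item edits. Note that purchasePrice and soldPrice are string-typed with example "0", so clients should send decimals as strings, not JSON numbers. A minimal sketch of building a valid payload in Go (every value below is invented for illustration):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hypothetical IDs; a real payload would use UUIDs returned by the API.
	payload := map[string]interface{}{
		"id":            "11111111-2222-3333-4444-555555555555",
		"name":          "Cordless Drill",
		"description":   "18V drill, kitchen drawer",
		"insured":       true,
		"quantity":      2,
		"purchasePrice": "129.99", // string-encoded decimal per the schema above
		"labelIds":      []string{"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"},
		"locationId":    "99999999-8888-7777-6666-555555555555",
	}
	body, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}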
   types.LabelCreate:
     properties:
       color:
@@ -571,6 +349,21 @@ definitions:
       password:
         type: string
     type: object
+  types.UserOut:
+    properties:
+      email:
+        type: string
+      groupId:
+        type: string
+      groupName:
+        type: string
+      id:
+        type: string
+      isSuperuser:
+        type: boolean
+      name:
+        type: string
+    type: object
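types.UserOut replaces the raw ent.User entity in API responses (see the /v1/users/self change further down), trimming the payload to safe, camelCase fields. A client-side mirror might look like this (struct name and tags are illustrative, derived only from the definition above):

// Sketch only: shape for decoding the "item" member of a server.Result envelope.
type UserOut struct {
	ID          string `json:"id"`
	Name        string `json:"name"`
	Email       string `json:"email"`
	GroupID     string `json:"groupId"`
	GroupName   string `json:"groupName"`
	IsSuperuser bool   `json:"isSuperuser"`
}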
   types.UserRegistration:
     properties:
       groupName:
@@ -609,7 +402,7 @@ paths:
           - properties:
               items:
                 items:
-                  $ref: '#/definitions/types.ItemOut'
+                  $ref: '#/definitions/types.ItemSummary'
                 type: array
             type: object
       security:
@@ -681,6 +474,12 @@ paths:
         name: id
         required: true
         type: string
+      - description: Item Data
+        in: body
+        name: payload
+        required: true
+        schema:
+          $ref: '#/definitions/types.ItemUpdate'
       produces:
       - application/json
       responses:
@@ -1006,7 +805,7 @@ paths:
           - $ref: '#/definitions/server.Result'
           - properties:
               item:
-                $ref: '#/definitions/ent.User'
+                $ref: '#/definitions/types.UserOut'
             type: object
       security:
       - Bearer: []
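Taken together, the path changes mean PUT /v1/items/{id} now documents a required types.ItemUpdate body. A hedged sketch of calling it from Go (host, port, ID, and token are all placeholders; the Bearer scheme comes from the security section above):

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	id := "11111111-2222-3333-4444-555555555555" // hypothetical item ID
	body := []byte(`{"id":"11111111-2222-3333-4444-555555555555","name":"Cordless Drill","quantity":2}`)

	// Placeholder host/port; adjust to wherever the API is running.
	req, err := http.NewRequest(http.MethodPut, "http://localhost:7745/api/v1/items/"+id, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer <token>") // placeholder token

	// Sending requires a running server; shown here for shape only.
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}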
@@ -14,7 +14,7 @@ import (
 // @Summary Get All Items
 // @Tags Items
 // @Produce json
-// @Success 200 {object} server.Results{items=[]types.ItemOut}
+// @Success 200 {object} server.Results{items=[]types.ItemSummary}
 // @Router /v1/items [GET]
 // @Security Bearer
 func (ctrl *V1Controller) HandleItemsGetAll() http.HandlerFunc {
@@ -64,7 +64,7 @@ func (ctrl *V1Controller) HandleItemsCreate() http.HandlerFunc {
 // @Summary deletes a item
 // @Tags Items
 // @Produce json
 // @Param id path string true "Item ID"
 // @Success 204
 // @Router /v1/items/{id} [DELETE]
 // @Security Bearer
@@ -90,7 +90,7 @@ func (ctrl *V1Controller) HandleItemDelete() http.HandlerFunc {
 // @Tags Items
 // @Produce json
 // @Param id path string true "Item ID"
 // @Success 200 {object} types.ItemOut
 // @Router /v1/items/{id} [GET]
 // @Security Bearer
 func (ctrl *V1Controller) HandleItemGet() http.HandlerFunc {
@@ -115,6 +115,7 @@ func (ctrl *V1Controller) HandleItemGet() http.HandlerFunc {
 // @Tags Items
 // @Produce json
 // @Param id path string true "Item ID"
+// @Param payload body types.ItemUpdate true "Item Data"
 // @Success 200 {object} types.ItemOut
 // @Router /v1/items/{id} [PUT]
 // @Security Bearer

@@ -41,7 +41,7 @@ func (ctrl *V1Controller) HandleUserRegistration() http.HandlerFunc {
 // @Summary Get the current user
 // @Tags User
 // @Produce json
-// @Success 200 {object} server.Result{item=ent.User}
+// @Success 200 {object} server.Result{item=types.UserOut}
 // @Router /v1/users/self [GET]
 // @Security Bearer
 func (ctrl *V1Controller) HandleUserSelf() http.HandlerFunc {
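These @-comments are swaggo directives; the YAML above is regenerated from them, so the spec and handlers change in lockstep. The body of the update handler is outside this hunk; a rough sketch of the usual shape for a handler consuming the new types.ItemUpdate payload (the service hand-off is an assumption, not shown in this diff):

// Sketch only; decoding and status codes follow the annotations above.
func (ctrl *V1Controller) HandleItemUpdate() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var body types.ItemUpdate
		if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
			http.Error(w, err.Error(), http.StatusUnprocessableEntity)
			return
		}
		// A real handler would pass `body` to the item service and
		// write the resulting types.ItemOut back as JSON.
		w.WriteHeader(http.StatusOK)
	}
}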
197 backend/ent/attachment.go Normal file
@@ -0,0 +1,197 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"fmt"
	"strings"
	"time"

	"entgo.io/ent/dialect/sql"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/item"
)

// Attachment is the model entity for the Attachment schema.
type Attachment struct {
	config `json:"-"`
	// ID of the ent.
	ID uuid.UUID `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// Type holds the value of the "type" field.
	Type attachment.Type `json:"type,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the AttachmentQuery when eager-loading is set.
	Edges                AttachmentEdges `json:"edges"`
	document_attachments *uuid.UUID
	item_attachments     *uuid.UUID
}

// AttachmentEdges holds the relations/edges for other nodes in the graph.
type AttachmentEdges struct {
	// Item holds the value of the item edge.
	Item *Item `json:"item,omitempty"`
	// Document holds the value of the document edge.
	Document *Document `json:"document,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	loadedTypes [2]bool
}

// ItemOrErr returns the Item value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AttachmentEdges) ItemOrErr() (*Item, error) {
	if e.loadedTypes[0] {
		if e.Item == nil {
			// Edge was loaded but was not found.
			return nil, &NotFoundError{label: item.Label}
		}
		return e.Item, nil
	}
	return nil, &NotLoadedError{edge: "item"}
}

// DocumentOrErr returns the Document value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AttachmentEdges) DocumentOrErr() (*Document, error) {
	if e.loadedTypes[1] {
		if e.Document == nil {
			// Edge was loaded but was not found.
			return nil, &NotFoundError{label: document.Label}
		}
		return e.Document, nil
	}
	return nil, &NotLoadedError{edge: "document"}
}

// scanValues returns the types for scanning values from sql.Rows.
func (*Attachment) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case attachment.FieldType:
			values[i] = new(sql.NullString)
		case attachment.FieldCreatedAt, attachment.FieldUpdatedAt:
			values[i] = new(sql.NullTime)
		case attachment.FieldID:
			values[i] = new(uuid.UUID)
		case attachment.ForeignKeys[0]: // document_attachments
			values[i] = &sql.NullScanner{S: new(uuid.UUID)}
		case attachment.ForeignKeys[1]: // item_attachments
			values[i] = &sql.NullScanner{S: new(uuid.UUID)}
		default:
			return nil, fmt.Errorf("unexpected column %q for type Attachment", columns[i])
		}
	}
	return values, nil
}

// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Attachment fields.
func (a *Attachment) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case attachment.FieldID:
			if value, ok := values[i].(*uuid.UUID); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value != nil {
				a.ID = *value
			}
		case attachment.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				a.CreatedAt = value.Time
			}
		case attachment.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				a.UpdatedAt = value.Time
			}
		case attachment.FieldType:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field type", values[i])
			} else if value.Valid {
				a.Type = attachment.Type(value.String)
			}
		case attachment.ForeignKeys[0]:
			if value, ok := values[i].(*sql.NullScanner); !ok {
				return fmt.Errorf("unexpected type %T for field document_attachments", values[i])
			} else if value.Valid {
				a.document_attachments = new(uuid.UUID)
				*a.document_attachments = *value.S.(*uuid.UUID)
			}
		case attachment.ForeignKeys[1]:
			if value, ok := values[i].(*sql.NullScanner); !ok {
				return fmt.Errorf("unexpected type %T for field item_attachments", values[i])
			} else if value.Valid {
				a.item_attachments = new(uuid.UUID)
				*a.item_attachments = *value.S.(*uuid.UUID)
			}
		}
	}
	return nil
}

// QueryItem queries the "item" edge of the Attachment entity.
func (a *Attachment) QueryItem() *ItemQuery {
	return (&AttachmentClient{config: a.config}).QueryItem(a)
}

// QueryDocument queries the "document" edge of the Attachment entity.
func (a *Attachment) QueryDocument() *DocumentQuery {
	return (&AttachmentClient{config: a.config}).QueryDocument(a)
}

// Update returns a builder for updating this Attachment.
// Note that you need to call Attachment.Unwrap() before calling this method if this Attachment
// was returned from a transaction, and the transaction was committed or rolled back.
func (a *Attachment) Update() *AttachmentUpdateOne {
	return (&AttachmentClient{config: a.config}).UpdateOne(a)
}

// Unwrap unwraps the Attachment entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (a *Attachment) Unwrap() *Attachment {
	_tx, ok := a.config.driver.(*txDriver)
	if !ok {
		panic("ent: Attachment is not a transactional entity")
	}
	a.config.driver = _tx.drv
	return a
}

// String implements the fmt.Stringer.
func (a *Attachment) String() string {
	var builder strings.Builder
	builder.WriteString("Attachment(")
	builder.WriteString(fmt.Sprintf("id=%v, ", a.ID))
	builder.WriteString("created_at=")
	builder.WriteString(a.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("updated_at=")
	builder.WriteString(a.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("type=")
	builder.WriteString(fmt.Sprintf("%v", a.Type))
	builder.WriteByte(')')
	return builder.String()
}

// Attachments is a parsable slice of Attachment.
type Attachments []*Attachment

func (a Attachments) config(cfg config) {
	for _i := range a {
		a[_i].config = cfg
	}
}
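Edges on the generated model are only populated when the query eager-loads them, which is why the OrErr accessors exist. A minimal usage sketch, assuming an initialized *ent.Client, that the Document entity exposes a Path field (as the swagger definitions earlier suggest), and that WithDocument is the eager-loading method ent generates alongside this file:

package main

import (
	"context"
	"fmt"

	"github.com/hay-kot/content/backend/ent"
)

// Sketch only: list attachments with their document edge preloaded.
func printAttachmentPaths(ctx context.Context, client *ent.Client) error {
	attachments, err := client.Attachment.Query().
		WithDocument(). // populates Edges.Document and loadedTypes[1]
		All(ctx)
	if err != nil {
		return err
	}
	for _, a := range attachments {
		doc, err := a.Edges.DocumentOrErr()
		if err != nil {
			return err // edge not loaded, or loaded but missing
		}
		fmt.Println(a.ID, a.Type, doc.Path)
	}
	return nil
}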
112 backend/ent/attachment/attachment.go Normal file
@@ -0,0 +1,112 @@
// Code generated by ent, DO NOT EDIT.

package attachment

import (
	"fmt"
	"time"

	"github.com/google/uuid"
)

const (
	// Label holds the string label denoting the attachment type in the database.
	Label = "attachment"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
	FieldUpdatedAt = "updated_at"
	// FieldType holds the string denoting the type field in the database.
	FieldType = "type"
	// EdgeItem holds the string denoting the item edge name in mutations.
	EdgeItem = "item"
	// EdgeDocument holds the string denoting the document edge name in mutations.
	EdgeDocument = "document"
	// Table holds the table name of the attachment in the database.
	Table = "attachments"
	// ItemTable is the table that holds the item relation/edge.
	ItemTable = "attachments"
	// ItemInverseTable is the table name for the Item entity.
	// It exists in this package in order to avoid circular dependency with the "item" package.
	ItemInverseTable = "items"
	// ItemColumn is the table column denoting the item relation/edge.
	ItemColumn = "item_attachments"
	// DocumentTable is the table that holds the document relation/edge.
	DocumentTable = "attachments"
	// DocumentInverseTable is the table name for the Document entity.
	// It exists in this package in order to avoid circular dependency with the "document" package.
	DocumentInverseTable = "documents"
	// DocumentColumn is the table column denoting the document relation/edge.
	DocumentColumn = "document_attachments"
)

// Columns holds all SQL columns for attachment fields.
var Columns = []string{
	FieldID,
	FieldCreatedAt,
	FieldUpdatedAt,
	FieldType,
}

// ForeignKeys holds the SQL foreign-keys that are owned by the "attachments"
// table and are not defined as standalone fields in the schema.
var ForeignKeys = []string{
	"document_attachments",
	"item_attachments",
}

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	for i := range ForeignKeys {
		if column == ForeignKeys[i] {
			return true
		}
	}
	return false
}

var (
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
	DefaultUpdatedAt func() time.Time
	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
	UpdateDefaultUpdatedAt func() time.Time
	// DefaultID holds the default value on creation for the "id" field.
	DefaultID func() uuid.UUID
)

// Type defines the type for the "type" enum field.
type Type string

// TypeAttachment is the default value of the Type enum.
const DefaultType = TypeAttachment

// Type values.
const (
	TypePhoto      Type = "photo"
	TypeManual     Type = "manual"
	TypeWarranty   Type = "warranty"
	TypeAttachment Type = "attachment"
)

func (_type Type) String() string {
	return string(_type)
}

// TypeValidator is a validator for the "type" field enum values. It is called by the builders before save.
func TypeValidator(_type Type) error {
	switch _type {
	case TypePhoto, TypeManual, TypeWarranty, TypeAttachment:
		return nil
	default:
		return fmt.Errorf("attachment: invalid enum value for type field: %q", _type)
	}
}
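The type enum is closed: photo, manual, warranty, and attachment (the default). TypeValidator is what the builders call before save, and it can also be used directly:

package main

import (
	"fmt"

	"github.com/hay-kot/content/backend/ent/attachment"
)

func main() {
	// A known value passes validation.
	if err := attachment.TypeValidator(attachment.TypePhoto); err != nil {
		panic(err)
	}
	fmt.Println("ok:", attachment.TypePhoto) // ok: photo

	// Anything outside the enum is rejected by the builders before save.
	err := attachment.TypeValidator(attachment.Type("video"))
	fmt.Println(err) // attachment: invalid enum value for type field: "video"
}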
349 backend/ent/attachment/where.go Normal file
@@ -0,0 +1,349 @@
// Code generated by ent, DO NOT EDIT.

package attachment

import (
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// ID filters vertices based on their ID field.
func ID(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldID), id))
	})
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.In(s.C(FieldID), v...))
	})
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.NotIn(s.C(FieldID), v...))
	})
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldID), id))
	})
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldID), id))
	})
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldID), id))
	})
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id uuid.UUID) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldID), id))
	})
}

// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.Attachment {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.Attachment {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.Attachment {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.Attachment {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
	})
}

// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v Type) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldType), v))
	})
}

// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v Type) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldType), v))
	})
}

// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...Type) predicate.Attachment {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldType), v...))
	})
}

// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...Type) predicate.Attachment {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldType), v...))
	})
}

// HasItem applies the HasEdge predicate on the "item" edge.
func HasItem() predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(ItemTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
		)
		sqlgraph.HasNeighbors(s, step)
	})
}

// HasItemWith applies the HasEdge predicate on the "item" edge with a given conditions (other predicates).
func HasItemWith(preds ...predicate.Item) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(ItemInverseTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
		)
		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
			for _, p := range preds {
				p(s)
			}
		})
	})
}

// HasDocument applies the HasEdge predicate on the "document" edge.
func HasDocument() predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(DocumentTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
		)
		sqlgraph.HasNeighbors(s, step)
	})
}

// HasDocumentWith applies the HasEdge predicate on the "document" edge with a given conditions (other predicates).
func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(DocumentInverseTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
		)
		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
			for _, p := range preds {
				p(s)
			}
		})
	})
}

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Attachment) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s1 := s.Clone().SetP(nil)
		for _, p := range predicates {
			p(s1)
		}
		s.Where(s1.P())
	})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Attachment) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		s1 := s.Clone().SetP(nil)
		for i, p := range predicates {
			if i > 0 {
				s1.Or()
			}
			p(s1)
		}
		s.Where(s1.P())
	})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Attachment) predicate.Attachment {
	return predicate.Attachment(func(s *sql.Selector) {
		p(s.Not())
	})
}
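These predicates compose under client.Attachment.Query().Where(...). A sketch that combines a field predicate with an edge predicate, assuming an initialized *ent.Client and an item UUID (item.ID is the analogous generated predicate in the item package):

package main

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/item"
)

// Sketch only: warranty attachments belonging to one item.
func warrantyAttachments(ctx context.Context, client *ent.Client, itemID uuid.UUID) ([]*ent.Attachment, error) {
	return client.Attachment.Query().
		Where(
			attachment.And(
				attachment.TypeEQ(attachment.TypeWarranty),
				attachment.HasItemWith(item.ID(itemID)),
			),
		).
		All(ctx)
}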
402 backend/ent/attachment_create.go Normal file
@@ -0,0 +1,402 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"errors"
	"fmt"
	"time"

	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/item"
)

// AttachmentCreate is the builder for creating a Attachment entity.
type AttachmentCreate struct {
	config
	mutation *AttachmentMutation
	hooks    []Hook
}

// SetCreatedAt sets the "created_at" field.
func (ac *AttachmentCreate) SetCreatedAt(t time.Time) *AttachmentCreate {
	ac.mutation.SetCreatedAt(t)
	return ac
}

// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
func (ac *AttachmentCreate) SetNillableCreatedAt(t *time.Time) *AttachmentCreate {
	if t != nil {
		ac.SetCreatedAt(*t)
	}
	return ac
}

// SetUpdatedAt sets the "updated_at" field.
func (ac *AttachmentCreate) SetUpdatedAt(t time.Time) *AttachmentCreate {
	ac.mutation.SetUpdatedAt(t)
	return ac
}

// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
func (ac *AttachmentCreate) SetNillableUpdatedAt(t *time.Time) *AttachmentCreate {
	if t != nil {
		ac.SetUpdatedAt(*t)
	}
	return ac
}

// SetType sets the "type" field.
func (ac *AttachmentCreate) SetType(a attachment.Type) *AttachmentCreate {
	ac.mutation.SetType(a)
	return ac
}

// SetNillableType sets the "type" field if the given value is not nil.
func (ac *AttachmentCreate) SetNillableType(a *attachment.Type) *AttachmentCreate {
	if a != nil {
		ac.SetType(*a)
	}
	return ac
}

// SetID sets the "id" field.
func (ac *AttachmentCreate) SetID(u uuid.UUID) *AttachmentCreate {
	ac.mutation.SetID(u)
	return ac
}

// SetNillableID sets the "id" field if the given value is not nil.
func (ac *AttachmentCreate) SetNillableID(u *uuid.UUID) *AttachmentCreate {
	if u != nil {
		ac.SetID(*u)
	}
	return ac
}

// SetItemID sets the "item" edge to the Item entity by ID.
func (ac *AttachmentCreate) SetItemID(id uuid.UUID) *AttachmentCreate {
	ac.mutation.SetItemID(id)
	return ac
}

// SetItem sets the "item" edge to the Item entity.
func (ac *AttachmentCreate) SetItem(i *Item) *AttachmentCreate {
	return ac.SetItemID(i.ID)
}

// SetDocumentID sets the "document" edge to the Document entity by ID.
func (ac *AttachmentCreate) SetDocumentID(id uuid.UUID) *AttachmentCreate {
	ac.mutation.SetDocumentID(id)
	return ac
}

// SetDocument sets the "document" edge to the Document entity.
func (ac *AttachmentCreate) SetDocument(d *Document) *AttachmentCreate {
	return ac.SetDocumentID(d.ID)
}

// Mutation returns the AttachmentMutation object of the builder.
func (ac *AttachmentCreate) Mutation() *AttachmentMutation {
	return ac.mutation
}

// Save creates the Attachment in the database.
func (ac *AttachmentCreate) Save(ctx context.Context) (*Attachment, error) {
	var (
		err  error
		node *Attachment
	)
	ac.defaults()
	if len(ac.hooks) == 0 {
		if err = ac.check(); err != nil {
			return nil, err
		}
		node, err = ac.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*AttachmentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = ac.check(); err != nil {
				return nil, err
			}
			ac.mutation = mutation
			if node, err = ac.sqlSave(ctx); err != nil {
				return nil, err
			}
			mutation.id = &node.ID
			mutation.done = true
			return node, err
		})
		for i := len(ac.hooks) - 1; i >= 0; i-- {
			if ac.hooks[i] == nil {
				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = ac.hooks[i](mut)
		}
		v, err := mut.Mutate(ctx, ac.mutation)
		if err != nil {
			return nil, err
		}
		nv, ok := v.(*Attachment)
		if !ok {
			return nil, fmt.Errorf("unexpected node type %T returned from AttachmentMutation", v)
		}
		node = nv
	}
	return node, err
}

// SaveX calls Save and panics if Save returns an error.
func (ac *AttachmentCreate) SaveX(ctx context.Context) *Attachment {
	v, err := ac.Save(ctx)
	if err != nil {
		panic(err)
	}
	return v
}

// Exec executes the query.
func (ac *AttachmentCreate) Exec(ctx context.Context) error {
	_, err := ac.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (ac *AttachmentCreate) ExecX(ctx context.Context) {
	if err := ac.Exec(ctx); err != nil {
		panic(err)
	}
}

// defaults sets the default values of the builder before save.
func (ac *AttachmentCreate) defaults() {
	if _, ok := ac.mutation.CreatedAt(); !ok {
		v := attachment.DefaultCreatedAt()
		ac.mutation.SetCreatedAt(v)
	}
	if _, ok := ac.mutation.UpdatedAt(); !ok {
		v := attachment.DefaultUpdatedAt()
		ac.mutation.SetUpdatedAt(v)
	}
	if _, ok := ac.mutation.GetType(); !ok {
		v := attachment.DefaultType
		ac.mutation.SetType(v)
	}
	if _, ok := ac.mutation.ID(); !ok {
		v := attachment.DefaultID()
		ac.mutation.SetID(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (ac *AttachmentCreate) check() error {
	if _, ok := ac.mutation.CreatedAt(); !ok {
		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Attachment.created_at"`)}
	}
	if _, ok := ac.mutation.UpdatedAt(); !ok {
		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Attachment.updated_at"`)}
	}
	if _, ok := ac.mutation.GetType(); !ok {
		return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "Attachment.type"`)}
	}
	if v, ok := ac.mutation.GetType(); ok {
		if err := attachment.TypeValidator(v); err != nil {
			return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
		}
	}
	if _, ok := ac.mutation.ItemID(); !ok {
		return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
	}
	if _, ok := ac.mutation.DocumentID(); !ok {
		return &ValidationError{Name: "document", err: errors.New(`ent: missing required edge "Attachment.document"`)}
	}
	return nil
}

func (ac *AttachmentCreate) sqlSave(ctx context.Context) (*Attachment, error) {
	_node, _spec := ac.createSpec()
	if err := sqlgraph.CreateNode(ctx, ac.driver, _spec); err != nil {
		if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return nil, err
	}
	if _spec.ID.Value != nil {
		if id, ok := _spec.ID.Value.(*uuid.UUID); ok {
			_node.ID = *id
		} else if err := _node.ID.Scan(_spec.ID.Value); err != nil {
			return nil, err
		}
	}
	return _node, nil
}

func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
	var (
		_node = &Attachment{config: ac.config}
		_spec = &sqlgraph.CreateSpec{
			Table: attachment.Table,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: attachment.FieldID,
			},
		}
	)
	if id, ok := ac.mutation.ID(); ok {
		_node.ID = id
		_spec.ID.Value = &id
	}
	if value, ok := ac.mutation.CreatedAt(); ok {
		_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: attachment.FieldCreatedAt,
		})
		_node.CreatedAt = value
	}
	if value, ok := ac.mutation.UpdatedAt(); ok {
		_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: attachment.FieldUpdatedAt,
		})
		_node.UpdatedAt = value
	}
	if value, ok := ac.mutation.GetType(); ok {
		_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
			Type:   field.TypeEnum,
			Value:  value,
			Column: attachment.FieldType,
		})
		_node.Type = value
	}
	if nodes := ac.mutation.ItemIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.ItemTable,
			Columns: []string{attachment.ItemColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: item.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_node.item_attachments = &nodes[0]
		_spec.Edges = append(_spec.Edges, edge)
	}
	if nodes := ac.mutation.DocumentIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.DocumentTable,
			Columns: []string{attachment.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_node.document_attachments = &nodes[0]
		_spec.Edges = append(_spec.Edges, edge)
	}
	return _node, _spec
}

// AttachmentCreateBulk is the builder for creating many Attachment entities in bulk.
type AttachmentCreateBulk struct {
	config
	builders []*AttachmentCreate
}

// Save creates the Attachment entities in the database.
func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error) {
	specs := make([]*sqlgraph.CreateSpec, len(acb.builders))
	nodes := make([]*Attachment, len(acb.builders))
	mutators := make([]Mutator, len(acb.builders))
	for i := range acb.builders {
		func(i int, root context.Context) {
			builder := acb.builders[i]
			builder.defaults()
			var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
				mutation, ok := m.(*AttachmentMutation)
				if !ok {
					return nil, fmt.Errorf("unexpected mutation type %T", m)
				}
				if err := builder.check(); err != nil {
					return nil, err
				}
				builder.mutation = mutation
				nodes[i], specs[i] = builder.createSpec()
				var err error
				if i < len(mutators)-1 {
					_, err = mutators[i+1].Mutate(root, acb.builders[i+1].mutation)
				} else {
					spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
					// Invoke the actual operation on the latest mutation in the chain.
					if err = sqlgraph.BatchCreate(ctx, acb.driver, spec); err != nil {
						if sqlgraph.IsConstraintError(err) {
							err = &ConstraintError{msg: err.Error(), wrap: err}
						}
					}
				}
				if err != nil {
					return nil, err
				}
				mutation.id = &nodes[i].ID
				mutation.done = true
				return nodes[i], nil
			})
			for i := len(builder.hooks) - 1; i >= 0; i-- {
				mut = builder.hooks[i](mut)
			}
			mutators[i] = mut
		}(i, ctx)
	}
	if len(mutators) > 0 {
		if _, err := mutators[0].Mutate(ctx, acb.builders[0].mutation); err != nil {
			return nil, err
		}
	}
	return nodes, nil
}

// SaveX is like Save, but panics if an error occurs.
func (acb *AttachmentCreateBulk) SaveX(ctx context.Context) []*Attachment {
	v, err := acb.Save(ctx)
	if err != nil {
		panic(err)
	}
	return v
}

// Exec executes the query.
func (acb *AttachmentCreateBulk) Exec(ctx context.Context) error {
	_, err := acb.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (acb *AttachmentCreateBulk) ExecX(ctx context.Context) {
	if err := acb.Exec(ctx); err != nil {
		panic(err)
	}
}
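check() makes both edges mandatory, so every attachment must be created with an item and a document; type may be omitted and falls back to DefaultType ("attachment"). A minimal sketch, assuming itm and doc were fetched or created earlier with the same client:

package main

import (
	"context"

	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/attachment"
)

// Sketch only: create one attachment linking an existing item and document.
func createPhotoAttachment(ctx context.Context, client *ent.Client, itm *ent.Item, doc *ent.Document) (*ent.Attachment, error) {
	return client.Attachment.Create().
		SetType(attachment.TypePhoto). // omit to get the "attachment" default
		SetItem(itm).                  // required edge
		SetDocument(doc).              // required edge
		Save(ctx)
}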
115 backend/ent/attachment_delete.go Normal file
@@ -0,0 +1,115 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"fmt"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// AttachmentDelete is the builder for deleting a Attachment entity.
type AttachmentDelete struct {
	config
	hooks    []Hook
	mutation *AttachmentMutation
}

// Where appends a list predicates to the AttachmentDelete builder.
func (ad *AttachmentDelete) Where(ps ...predicate.Attachment) *AttachmentDelete {
	ad.mutation.Where(ps...)
	return ad
}

// Exec executes the deletion query and returns how many vertices were deleted.
func (ad *AttachmentDelete) Exec(ctx context.Context) (int, error) {
	var (
		err      error
		affected int
	)
	if len(ad.hooks) == 0 {
		affected, err = ad.sqlExec(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*AttachmentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			ad.mutation = mutation
			affected, err = ad.sqlExec(ctx)
			mutation.done = true
			return affected, err
		})
		for i := len(ad.hooks) - 1; i >= 0; i-- {
			if ad.hooks[i] == nil {
				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = ad.hooks[i](mut)
		}
		if _, err := mut.Mutate(ctx, ad.mutation); err != nil {
			return 0, err
		}
	}
	return affected, err
}

// ExecX is like Exec, but panics if an error occurs.
func (ad *AttachmentDelete) ExecX(ctx context.Context) int {
	n, err := ad.Exec(ctx)
	if err != nil {
		panic(err)
	}
	return n
}

func (ad *AttachmentDelete) sqlExec(ctx context.Context) (int, error) {
	_spec := &sqlgraph.DeleteSpec{
		Node: &sqlgraph.NodeSpec{
			Table: attachment.Table,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: attachment.FieldID,
			},
		},
	}
	if ps := ad.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	affected, err := sqlgraph.DeleteNodes(ctx, ad.driver, _spec)
	if err != nil && sqlgraph.IsConstraintError(err) {
		err = &ConstraintError{msg: err.Error(), wrap: err}
	}
	return affected, err
}

// AttachmentDeleteOne is the builder for deleting a single Attachment entity.
type AttachmentDeleteOne struct {
	ad *AttachmentDelete
}

// Exec executes the deletion query.
func (ado *AttachmentDeleteOne) Exec(ctx context.Context) error {
	n, err := ado.ad.Exec(ctx)
	switch {
	case err != nil:
		return err
	case n == 0:
		return &NotFoundError{attachment.Label}
	default:
		return nil
	}
}

// ExecX is like Exec, but panics if an error occurs.
func (ado *AttachmentDeleteOne) ExecX(ctx context.Context) {
	ado.ad.ExecX(ctx)
}
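Deletes are predicate-driven, and Exec reports how many rows were removed. A sketch that clears all manual attachments for a given item, under the same client assumption as above:

package main

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/item"
)

// Sketch only: bulk-delete by type and owning item.
func deleteManuals(ctx context.Context, client *ent.Client, itemID uuid.UUID) (int, error) {
	return client.Attachment.Delete().
		Where(
			attachment.TypeEQ(attachment.TypeManual),
			attachment.HasItemWith(item.ID(itemID)),
		).
		Exec(ctx)
}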
683
backend/ent/attachment_query.go
Normal file
683
backend/ent/attachment_query.go
Normal file
|
@ -0,0 +1,683 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"fmt"
	"math"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/item"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// AttachmentQuery is the builder for querying Attachment entities.
type AttachmentQuery struct {
	config
	limit        *int
	offset       *int
	unique       *bool
	order        []OrderFunc
	fields       []string
	predicates   []predicate.Attachment
	withItem     *ItemQuery
	withDocument *DocumentQuery
	withFKs      bool
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}

// Where adds a new predicate for the AttachmentQuery builder.
func (aq *AttachmentQuery) Where(ps ...predicate.Attachment) *AttachmentQuery {
	aq.predicates = append(aq.predicates, ps...)
	return aq
}

// Limit adds a limit step to the query.
func (aq *AttachmentQuery) Limit(limit int) *AttachmentQuery {
	aq.limit = &limit
	return aq
}

// Offset adds an offset step to the query.
func (aq *AttachmentQuery) Offset(offset int) *AttachmentQuery {
	aq.offset = &offset
	return aq
}

// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (aq *AttachmentQuery) Unique(unique bool) *AttachmentQuery {
	aq.unique = &unique
	return aq
}

// Order adds an order step to the query.
func (aq *AttachmentQuery) Order(o ...OrderFunc) *AttachmentQuery {
	aq.order = append(aq.order, o...)
	return aq
}

// QueryItem chains the current query on the "item" edge.
func (aq *AttachmentQuery) QueryItem() *ItemQuery {
	query := &ItemQuery{config: aq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := aq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := aq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(attachment.Table, attachment.FieldID, selector),
			sqlgraph.To(item.Table, item.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, attachment.ItemTable, attachment.ItemColumn),
		)
		fromU = sqlgraph.SetNeighbors(aq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// QueryDocument chains the current query on the "document" edge.
func (aq *AttachmentQuery) QueryDocument() *DocumentQuery {
	query := &DocumentQuery{config: aq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := aq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := aq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(attachment.Table, attachment.FieldID, selector),
			sqlgraph.To(document.Table, document.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, attachment.DocumentTable, attachment.DocumentColumn),
		)
		fromU = sqlgraph.SetNeighbors(aq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// First returns the first Attachment entity from the query.
// Returns a *NotFoundError when no Attachment was found.
func (aq *AttachmentQuery) First(ctx context.Context) (*Attachment, error) {
	nodes, err := aq.Limit(1).All(ctx)
	if err != nil {
		return nil, err
	}
	if len(nodes) == 0 {
		return nil, &NotFoundError{attachment.Label}
	}
	return nodes[0], nil
}

// FirstX is like First, but panics if an error occurs.
func (aq *AttachmentQuery) FirstX(ctx context.Context) *Attachment {
	node, err := aq.First(ctx)
	if err != nil && !IsNotFound(err) {
		panic(err)
	}
	return node
}

// FirstID returns the first Attachment ID from the query.
// Returns a *NotFoundError when no Attachment ID was found.
func (aq *AttachmentQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
	var ids []uuid.UUID
	if ids, err = aq.Limit(1).IDs(ctx); err != nil {
		return
	}
	if len(ids) == 0 {
		err = &NotFoundError{attachment.Label}
		return
	}
	return ids[0], nil
}

// FirstIDX is like FirstID, but panics if an error occurs.
func (aq *AttachmentQuery) FirstIDX(ctx context.Context) uuid.UUID {
	id, err := aq.FirstID(ctx)
	if err != nil && !IsNotFound(err) {
		panic(err)
	}
	return id
}

// Only returns a single Attachment entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one Attachment entity is found.
// Returns a *NotFoundError when no Attachment entities are found.
func (aq *AttachmentQuery) Only(ctx context.Context) (*Attachment, error) {
	nodes, err := aq.Limit(2).All(ctx)
	if err != nil {
		return nil, err
	}
	switch len(nodes) {
	case 1:
		return nodes[0], nil
	case 0:
		return nil, &NotFoundError{attachment.Label}
	default:
		return nil, &NotSingularError{attachment.Label}
	}
}

// OnlyX is like Only, but panics if an error occurs.
func (aq *AttachmentQuery) OnlyX(ctx context.Context) *Attachment {
	node, err := aq.Only(ctx)
	if err != nil {
		panic(err)
	}
	return node
}

// OnlyID is like Only, but returns the only Attachment ID in the query.
// Returns a *NotSingularError when more than one Attachment ID is found.
// Returns a *NotFoundError when no entities are found.
func (aq *AttachmentQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
	var ids []uuid.UUID
	if ids, err = aq.Limit(2).IDs(ctx); err != nil {
		return
	}
	switch len(ids) {
	case 1:
		id = ids[0]
	case 0:
		err = &NotFoundError{attachment.Label}
	default:
		err = &NotSingularError{attachment.Label}
	}
	return
}

// OnlyIDX is like OnlyID, but panics if an error occurs.
func (aq *AttachmentQuery) OnlyIDX(ctx context.Context) uuid.UUID {
	id, err := aq.OnlyID(ctx)
	if err != nil {
		panic(err)
	}
	return id
}

// All executes the query and returns a list of Attachments.
func (aq *AttachmentQuery) All(ctx context.Context) ([]*Attachment, error) {
	if err := aq.prepareQuery(ctx); err != nil {
		return nil, err
	}
	return aq.sqlAll(ctx)
}

// AllX is like All, but panics if an error occurs.
func (aq *AttachmentQuery) AllX(ctx context.Context) []*Attachment {
	nodes, err := aq.All(ctx)
	if err != nil {
		panic(err)
	}
	return nodes
}

// IDs executes the query and returns a list of Attachment IDs.
func (aq *AttachmentQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
	var ids []uuid.UUID
	if err := aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
		return nil, err
	}
	return ids, nil
}

// IDsX is like IDs, but panics if an error occurs.
func (aq *AttachmentQuery) IDsX(ctx context.Context) []uuid.UUID {
	ids, err := aq.IDs(ctx)
	if err != nil {
		panic(err)
	}
	return ids
}

// Count returns the count of the given query.
func (aq *AttachmentQuery) Count(ctx context.Context) (int, error) {
	if err := aq.prepareQuery(ctx); err != nil {
		return 0, err
	}
	return aq.sqlCount(ctx)
}

// CountX is like Count, but panics if an error occurs.
func (aq *AttachmentQuery) CountX(ctx context.Context) int {
	count, err := aq.Count(ctx)
	if err != nil {
		panic(err)
	}
	return count
}

// Exist returns true if the query has elements in the graph.
func (aq *AttachmentQuery) Exist(ctx context.Context) (bool, error) {
	if err := aq.prepareQuery(ctx); err != nil {
		return false, err
	}
	return aq.sqlExist(ctx)
}

// ExistX is like Exist, but panics if an error occurs.
func (aq *AttachmentQuery) ExistX(ctx context.Context) bool {
	exist, err := aq.Exist(ctx)
	if err != nil {
		panic(err)
	}
	return exist
}

// Clone returns a duplicate of the AttachmentQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (aq *AttachmentQuery) Clone() *AttachmentQuery {
	if aq == nil {
		return nil
	}
	return &AttachmentQuery{
		config:       aq.config,
		limit:        aq.limit,
		offset:       aq.offset,
		order:        append([]OrderFunc{}, aq.order...),
		predicates:   append([]predicate.Attachment{}, aq.predicates...),
		withItem:     aq.withItem.Clone(),
		withDocument: aq.withDocument.Clone(),
		// clone intermediate query.
		sql:    aq.sql.Clone(),
		path:   aq.path,
		unique: aq.unique,
	}
}

// WithItem tells the query-builder to eager-load the nodes that are connected to
// the "item" edge. The optional arguments are used to configure the query builder of the edge.
func (aq *AttachmentQuery) WithItem(opts ...func(*ItemQuery)) *AttachmentQuery {
	query := &ItemQuery{config: aq.config}
	for _, opt := range opts {
		opt(query)
	}
	aq.withItem = query
	return aq
}

// WithDocument tells the query-builder to eager-load the nodes that are connected to
// the "document" edge. The optional arguments are used to configure the query builder of the edge.
func (aq *AttachmentQuery) WithDocument(opts ...func(*DocumentQuery)) *AttachmentQuery {
	query := &DocumentQuery{config: aq.config}
	for _, opt := range opts {
		opt(query)
	}
	aq.withDocument = query
	return aq
}

// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
//	var v []struct {
//		CreatedAt time.Time `json:"created_at,omitempty"`
//		Count int `json:"count,omitempty"`
//	}
//
//	client.Attachment.Query().
//		GroupBy(attachment.FieldCreatedAt).
//		Aggregate(ent.Count()).
//		Scan(ctx, &v)
func (aq *AttachmentQuery) GroupBy(field string, fields ...string) *AttachmentGroupBy {
	grbuild := &AttachmentGroupBy{config: aq.config}
	grbuild.fields = append([]string{field}, fields...)
	grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) {
		if err := aq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		return aq.sqlQuery(ctx), nil
	}
	grbuild.label = attachment.Label
	grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan
	return grbuild
}

// Select allows the selection one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
//	var v []struct {
//		CreatedAt time.Time `json:"created_at,omitempty"`
//	}
//
//	client.Attachment.Query().
//		Select(attachment.FieldCreatedAt).
//		Scan(ctx, &v)
func (aq *AttachmentQuery) Select(fields ...string) *AttachmentSelect {
	aq.fields = append(aq.fields, fields...)
	selbuild := &AttachmentSelect{AttachmentQuery: aq}
	selbuild.label = attachment.Label
	selbuild.flds, selbuild.scan = &aq.fields, selbuild.Scan
	return selbuild
}

func (aq *AttachmentQuery) prepareQuery(ctx context.Context) error {
	for _, f := range aq.fields {
		if !attachment.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
		}
	}
	if aq.path != nil {
		prev, err := aq.path(ctx)
		if err != nil {
			return err
		}
		aq.sql = prev
	}
	return nil
}

func (aq *AttachmentQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Attachment, error) {
	var (
		nodes       = []*Attachment{}
		withFKs     = aq.withFKs
		_spec       = aq.querySpec()
		loadedTypes = [2]bool{
			aq.withItem != nil,
			aq.withDocument != nil,
		}
	)
	if aq.withItem != nil || aq.withDocument != nil {
		withFKs = true
	}
	if withFKs {
		_spec.Node.Columns = append(_spec.Node.Columns, attachment.ForeignKeys...)
	}
	_spec.ScanValues = func(columns []string) ([]interface{}, error) {
		return (*Attachment).scanValues(nil, columns)
	}
	_spec.Assign = func(columns []string, values []interface{}) error {
		node := &Attachment{config: aq.config}
		nodes = append(nodes, node)
		node.Edges.loadedTypes = loadedTypes
		return node.assignValues(columns, values)
	}
	for i := range hooks {
		hooks[i](ctx, _spec)
	}
	if err := sqlgraph.QueryNodes(ctx, aq.driver, _spec); err != nil {
		return nil, err
	}
	if len(nodes) == 0 {
		return nodes, nil
	}
	if query := aq.withItem; query != nil {
		if err := aq.loadItem(ctx, query, nodes, nil,
			func(n *Attachment, e *Item) { n.Edges.Item = e }); err != nil {
			return nil, err
		}
	}
	if query := aq.withDocument; query != nil {
		if err := aq.loadDocument(ctx, query, nodes, nil,
			func(n *Attachment, e *Document) { n.Edges.Document = e }); err != nil {
			return nil, err
		}
	}
	return nodes, nil
}

func (aq *AttachmentQuery) loadItem(ctx context.Context, query *ItemQuery, nodes []*Attachment, init func(*Attachment), assign func(*Attachment, *Item)) error {
	ids := make([]uuid.UUID, 0, len(nodes))
	nodeids := make(map[uuid.UUID][]*Attachment)
	for i := range nodes {
		if nodes[i].item_attachments == nil {
			continue
		}
		fk := *nodes[i].item_attachments
		if _, ok := nodeids[fk]; !ok {
			ids = append(ids, fk)
		}
		nodeids[fk] = append(nodeids[fk], nodes[i])
	}
	query.Where(item.IDIn(ids...))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		nodes, ok := nodeids[n.ID]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "item_attachments" returned %v`, n.ID)
		}
		for i := range nodes {
			assign(nodes[i], n)
		}
	}
	return nil
}
func (aq *AttachmentQuery) loadDocument(ctx context.Context, query *DocumentQuery, nodes []*Attachment, init func(*Attachment), assign func(*Attachment, *Document)) error {
	ids := make([]uuid.UUID, 0, len(nodes))
	nodeids := make(map[uuid.UUID][]*Attachment)
	for i := range nodes {
		if nodes[i].document_attachments == nil {
			continue
		}
		fk := *nodes[i].document_attachments
		if _, ok := nodeids[fk]; !ok {
			ids = append(ids, fk)
		}
		nodeids[fk] = append(nodeids[fk], nodes[i])
	}
	query.Where(document.IDIn(ids...))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		nodes, ok := nodeids[n.ID]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "document_attachments" returned %v`, n.ID)
		}
		for i := range nodes {
			assign(nodes[i], n)
		}
	}
	return nil
}

func (aq *AttachmentQuery) sqlCount(ctx context.Context) (int, error) {
	_spec := aq.querySpec()
	_spec.Node.Columns = aq.fields
	if len(aq.fields) > 0 {
		_spec.Unique = aq.unique != nil && *aq.unique
	}
	return sqlgraph.CountNodes(ctx, aq.driver, _spec)
}

func (aq *AttachmentQuery) sqlExist(ctx context.Context) (bool, error) {
	n, err := aq.sqlCount(ctx)
	if err != nil {
		return false, fmt.Errorf("ent: check existence: %w", err)
	}
	return n > 0, nil
}

func (aq *AttachmentQuery) querySpec() *sqlgraph.QuerySpec {
	_spec := &sqlgraph.QuerySpec{
		Node: &sqlgraph.NodeSpec{
			Table:   attachment.Table,
			Columns: attachment.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: attachment.FieldID,
			},
		},
		From:   aq.sql,
		Unique: true,
	}
	if unique := aq.unique; unique != nil {
		_spec.Unique = *unique
	}
	if fields := aq.fields; len(fields) > 0 {
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, attachment.FieldID)
		for i := range fields {
			if fields[i] != attachment.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
			}
		}
	}
	if ps := aq.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if limit := aq.limit; limit != nil {
		_spec.Limit = *limit
	}
	if offset := aq.offset; offset != nil {
		_spec.Offset = *offset
	}
	if ps := aq.order; len(ps) > 0 {
		_spec.Order = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	return _spec
}

func (aq *AttachmentQuery) sqlQuery(ctx context.Context) *sql.Selector {
	builder := sql.Dialect(aq.driver.Dialect())
	t1 := builder.Table(attachment.Table)
	columns := aq.fields
	if len(columns) == 0 {
		columns = attachment.Columns
	}
	selector := builder.Select(t1.Columns(columns...)...).From(t1)
	if aq.sql != nil {
		selector = aq.sql
		selector.Select(selector.Columns(columns...)...)
	}
	if aq.unique != nil && *aq.unique {
		selector.Distinct()
	}
	for _, p := range aq.predicates {
		p(selector)
	}
	for _, p := range aq.order {
		p(selector)
	}
	if offset := aq.offset; offset != nil {
		// limit is mandatory for offset clause. We start
		// with default value, and override it below if needed.
		selector.Offset(*offset).Limit(math.MaxInt32)
	}
	if limit := aq.limit; limit != nil {
		selector.Limit(*limit)
	}
	return selector
}

// AttachmentGroupBy is the group-by builder for Attachment entities.
type AttachmentGroupBy struct {
	config
	selector
	fields []string
	fns    []AggregateFunc
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}

// Aggregate adds the given aggregation functions to the group-by query.
func (agb *AttachmentGroupBy) Aggregate(fns ...AggregateFunc) *AttachmentGroupBy {
	agb.fns = append(agb.fns, fns...)
	return agb
}

// Scan applies the group-by query and scans the result into the given value.
func (agb *AttachmentGroupBy) Scan(ctx context.Context, v interface{}) error {
	query, err := agb.path(ctx)
	if err != nil {
		return err
	}
	agb.sql = query
	return agb.sqlScan(ctx, v)
}

func (agb *AttachmentGroupBy) sqlScan(ctx context.Context, v interface{}) error {
	for _, f := range agb.fields {
		if !attachment.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)}
		}
	}
	selector := agb.sqlQuery()
	if err := selector.Err(); err != nil {
		return err
	}
	rows := &sql.Rows{}
	query, args := selector.Query()
	if err := agb.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}

func (agb *AttachmentGroupBy) sqlQuery() *sql.Selector {
	selector := agb.sql.Select()
	aggregation := make([]string, 0, len(agb.fns))
	for _, fn := range agb.fns {
		aggregation = append(aggregation, fn(selector))
	}
	// If no columns were selected in a custom aggregation function, the default
	// selection is the fields used for "group-by", and the aggregation functions.
	if len(selector.SelectedColumns()) == 0 {
		columns := make([]string, 0, len(agb.fields)+len(agb.fns))
		for _, f := range agb.fields {
			columns = append(columns, selector.C(f))
		}
		columns = append(columns, aggregation...)
		selector.Select(columns...)
	}
	return selector.GroupBy(selector.Columns(agb.fields...)...)
}

// AttachmentSelect is the builder for selecting fields of Attachment entities.
type AttachmentSelect struct {
	*AttachmentQuery
	selector
	// intermediate query (i.e. traversal path).
	sql *sql.Selector
}

// Scan applies the selector query and scans the result into the given value.
func (as *AttachmentSelect) Scan(ctx context.Context, v interface{}) error {
	if err := as.prepareQuery(ctx); err != nil {
		return err
	}
	as.sql = as.AttachmentQuery.sqlQuery(ctx)
	return as.sqlScan(ctx, v)
}

func (as *AttachmentSelect) sqlScan(ctx context.Context, v interface{}) error {
	rows := &sql.Rows{}
	query, args := as.sql.Query()
	if err := as.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}
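For orientation: the query builder above follows ent's generated pattern. Predicates and pagination accumulate on AttachmentQuery, WithItem/WithDocument arm the eager-loading paths that sqlAll resolves through loadItem/loadDocument, and GroupBy/Select hand off to the dedicated builders at the end of the file. A short sketch, not part of this commit; it assumes an existing *ent.Client and the helper name is hypothetical:

	package example

	import (
		"context"

		"github.com/hay-kot/content/backend/ent"
	)

	// listAttachments eager-loads both edges, so each returned Attachment
	// has Edges.Item and Edges.Document populated via two batched queries
	// (one per edge) rather than one query per row.
	func listAttachments(ctx context.Context, client *ent.Client) ([]*ent.Attachment, error) {
		return client.Attachment.Query().
			WithItem().
			WithDocument().
			All(ctx)
	}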
587 backend/ent/attachment_update.go Normal file
@@ -0,0 +1,587 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"errors"
	"fmt"
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/item"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// AttachmentUpdate is the builder for updating Attachment entities.
type AttachmentUpdate struct {
	config
	hooks    []Hook
	mutation *AttachmentMutation
}

// Where appends a list predicates to the AttachmentUpdate builder.
func (au *AttachmentUpdate) Where(ps ...predicate.Attachment) *AttachmentUpdate {
	au.mutation.Where(ps...)
	return au
}

// SetUpdatedAt sets the "updated_at" field.
func (au *AttachmentUpdate) SetUpdatedAt(t time.Time) *AttachmentUpdate {
	au.mutation.SetUpdatedAt(t)
	return au
}

// SetType sets the "type" field.
func (au *AttachmentUpdate) SetType(a attachment.Type) *AttachmentUpdate {
	au.mutation.SetType(a)
	return au
}

// SetNillableType sets the "type" field if the given value is not nil.
func (au *AttachmentUpdate) SetNillableType(a *attachment.Type) *AttachmentUpdate {
	if a != nil {
		au.SetType(*a)
	}
	return au
}

// SetItemID sets the "item" edge to the Item entity by ID.
func (au *AttachmentUpdate) SetItemID(id uuid.UUID) *AttachmentUpdate {
	au.mutation.SetItemID(id)
	return au
}

// SetItem sets the "item" edge to the Item entity.
func (au *AttachmentUpdate) SetItem(i *Item) *AttachmentUpdate {
	return au.SetItemID(i.ID)
}

// SetDocumentID sets the "document" edge to the Document entity by ID.
func (au *AttachmentUpdate) SetDocumentID(id uuid.UUID) *AttachmentUpdate {
	au.mutation.SetDocumentID(id)
	return au
}

// SetDocument sets the "document" edge to the Document entity.
func (au *AttachmentUpdate) SetDocument(d *Document) *AttachmentUpdate {
	return au.SetDocumentID(d.ID)
}

// Mutation returns the AttachmentMutation object of the builder.
func (au *AttachmentUpdate) Mutation() *AttachmentMutation {
	return au.mutation
}

// ClearItem clears the "item" edge to the Item entity.
func (au *AttachmentUpdate) ClearItem() *AttachmentUpdate {
	au.mutation.ClearItem()
	return au
}

// ClearDocument clears the "document" edge to the Document entity.
func (au *AttachmentUpdate) ClearDocument() *AttachmentUpdate {
	au.mutation.ClearDocument()
	return au
}

// Save executes the query and returns the number of nodes affected by the update operation.
func (au *AttachmentUpdate) Save(ctx context.Context) (int, error) {
	var (
		err      error
		affected int
	)
	au.defaults()
	if len(au.hooks) == 0 {
		if err = au.check(); err != nil {
			return 0, err
		}
		affected, err = au.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*AttachmentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = au.check(); err != nil {
				return 0, err
			}
			au.mutation = mutation
			affected, err = au.sqlSave(ctx)
			mutation.done = true
			return affected, err
		})
		for i := len(au.hooks) - 1; i >= 0; i-- {
			if au.hooks[i] == nil {
				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = au.hooks[i](mut)
		}
		if _, err := mut.Mutate(ctx, au.mutation); err != nil {
			return 0, err
		}
	}
	return affected, err
}

// SaveX is like Save, but panics if an error occurs.
func (au *AttachmentUpdate) SaveX(ctx context.Context) int {
	affected, err := au.Save(ctx)
	if err != nil {
		panic(err)
	}
	return affected
}

// Exec executes the query.
func (au *AttachmentUpdate) Exec(ctx context.Context) error {
	_, err := au.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (au *AttachmentUpdate) ExecX(ctx context.Context) {
	if err := au.Exec(ctx); err != nil {
		panic(err)
	}
}

// defaults sets the default values of the builder before save.
func (au *AttachmentUpdate) defaults() {
	if _, ok := au.mutation.UpdatedAt(); !ok {
		v := attachment.UpdateDefaultUpdatedAt()
		au.mutation.SetUpdatedAt(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (au *AttachmentUpdate) check() error {
	if v, ok := au.mutation.GetType(); ok {
		if err := attachment.TypeValidator(v); err != nil {
			return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
		}
	}
	if _, ok := au.mutation.ItemID(); au.mutation.ItemCleared() && !ok {
		return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
	}
	if _, ok := au.mutation.DocumentID(); au.mutation.DocumentCleared() && !ok {
		return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
	}
	return nil
}

func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
	_spec := &sqlgraph.UpdateSpec{
		Node: &sqlgraph.NodeSpec{
			Table:   attachment.Table,
			Columns: attachment.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: attachment.FieldID,
			},
		},
	}
	if ps := au.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if value, ok := au.mutation.UpdatedAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: attachment.FieldUpdatedAt,
		})
	}
	if value, ok := au.mutation.GetType(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeEnum,
			Value:  value,
			Column: attachment.FieldType,
		})
	}
	if au.mutation.ItemCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.ItemTable,
			Columns: []string{attachment.ItemColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: item.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := au.mutation.ItemIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.ItemTable,
			Columns: []string{attachment.ItemColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: item.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if au.mutation.DocumentCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.DocumentTable,
			Columns: []string{attachment.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := au.mutation.DocumentIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.DocumentTable,
			Columns: []string{attachment.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if n, err = sqlgraph.UpdateNodes(ctx, au.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{attachment.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return 0, err
	}
	return n, nil
}

// AttachmentUpdateOne is the builder for updating a single Attachment entity.
type AttachmentUpdateOne struct {
	config
	fields   []string
	hooks    []Hook
	mutation *AttachmentMutation
}

// SetUpdatedAt sets the "updated_at" field.
func (auo *AttachmentUpdateOne) SetUpdatedAt(t time.Time) *AttachmentUpdateOne {
	auo.mutation.SetUpdatedAt(t)
	return auo
}

// SetType sets the "type" field.
func (auo *AttachmentUpdateOne) SetType(a attachment.Type) *AttachmentUpdateOne {
	auo.mutation.SetType(a)
	return auo
}

// SetNillableType sets the "type" field if the given value is not nil.
func (auo *AttachmentUpdateOne) SetNillableType(a *attachment.Type) *AttachmentUpdateOne {
	if a != nil {
		auo.SetType(*a)
	}
	return auo
}

// SetItemID sets the "item" edge to the Item entity by ID.
func (auo *AttachmentUpdateOne) SetItemID(id uuid.UUID) *AttachmentUpdateOne {
	auo.mutation.SetItemID(id)
	return auo
}

// SetItem sets the "item" edge to the Item entity.
func (auo *AttachmentUpdateOne) SetItem(i *Item) *AttachmentUpdateOne {
	return auo.SetItemID(i.ID)
}

// SetDocumentID sets the "document" edge to the Document entity by ID.
func (auo *AttachmentUpdateOne) SetDocumentID(id uuid.UUID) *AttachmentUpdateOne {
	auo.mutation.SetDocumentID(id)
	return auo
}

// SetDocument sets the "document" edge to the Document entity.
func (auo *AttachmentUpdateOne) SetDocument(d *Document) *AttachmentUpdateOne {
	return auo.SetDocumentID(d.ID)
}

// Mutation returns the AttachmentMutation object of the builder.
func (auo *AttachmentUpdateOne) Mutation() *AttachmentMutation {
	return auo.mutation
}

// ClearItem clears the "item" edge to the Item entity.
func (auo *AttachmentUpdateOne) ClearItem() *AttachmentUpdateOne {
	auo.mutation.ClearItem()
	return auo
}

// ClearDocument clears the "document" edge to the Document entity.
func (auo *AttachmentUpdateOne) ClearDocument() *AttachmentUpdateOne {
	auo.mutation.ClearDocument()
	return auo
}

// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (auo *AttachmentUpdateOne) Select(field string, fields ...string) *AttachmentUpdateOne {
	auo.fields = append([]string{field}, fields...)
	return auo
}

// Save executes the query and returns the updated Attachment entity.
func (auo *AttachmentUpdateOne) Save(ctx context.Context) (*Attachment, error) {
	var (
		err  error
		node *Attachment
	)
	auo.defaults()
	if len(auo.hooks) == 0 {
		if err = auo.check(); err != nil {
			return nil, err
		}
		node, err = auo.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*AttachmentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = auo.check(); err != nil {
				return nil, err
			}
			auo.mutation = mutation
			node, err = auo.sqlSave(ctx)
			mutation.done = true
			return node, err
		})
		for i := len(auo.hooks) - 1; i >= 0; i-- {
			if auo.hooks[i] == nil {
				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = auo.hooks[i](mut)
		}
		v, err := mut.Mutate(ctx, auo.mutation)
		if err != nil {
			return nil, err
		}
		nv, ok := v.(*Attachment)
		if !ok {
			return nil, fmt.Errorf("unexpected node type %T returned from AttachmentMutation", v)
		}
		node = nv
	}
	return node, err
}

// SaveX is like Save, but panics if an error occurs.
func (auo *AttachmentUpdateOne) SaveX(ctx context.Context) *Attachment {
	node, err := auo.Save(ctx)
	if err != nil {
		panic(err)
	}
	return node
}

// Exec executes the query on the entity.
func (auo *AttachmentUpdateOne) Exec(ctx context.Context) error {
	_, err := auo.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (auo *AttachmentUpdateOne) ExecX(ctx context.Context) {
	if err := auo.Exec(ctx); err != nil {
		panic(err)
	}
}

// defaults sets the default values of the builder before save.
func (auo *AttachmentUpdateOne) defaults() {
	if _, ok := auo.mutation.UpdatedAt(); !ok {
		v := attachment.UpdateDefaultUpdatedAt()
		auo.mutation.SetUpdatedAt(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (auo *AttachmentUpdateOne) check() error {
	if v, ok := auo.mutation.GetType(); ok {
		if err := attachment.TypeValidator(v); err != nil {
			return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
		}
	}
	if _, ok := auo.mutation.ItemID(); auo.mutation.ItemCleared() && !ok {
		return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
	}
	if _, ok := auo.mutation.DocumentID(); auo.mutation.DocumentCleared() && !ok {
		return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
	}
	return nil
}

func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment, err error) {
	_spec := &sqlgraph.UpdateSpec{
		Node: &sqlgraph.NodeSpec{
			Table:   attachment.Table,
			Columns: attachment.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: attachment.FieldID,
			},
		},
	}
	id, ok := auo.mutation.ID()
	if !ok {
		return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Attachment.id" for update`)}
	}
	_spec.Node.ID.Value = id
	if fields := auo.fields; len(fields) > 0 {
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, attachment.FieldID)
		for _, f := range fields {
			if !attachment.ValidColumn(f) {
				return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
			}
			if f != attachment.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, f)
			}
		}
	}
	if ps := auo.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if value, ok := auo.mutation.UpdatedAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: attachment.FieldUpdatedAt,
		})
	}
	if value, ok := auo.mutation.GetType(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeEnum,
			Value:  value,
			Column: attachment.FieldType,
		})
	}
	if auo.mutation.ItemCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.ItemTable,
			Columns: []string{attachment.ItemColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: item.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := auo.mutation.ItemIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.ItemTable,
			Columns: []string{attachment.ItemColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: item.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if auo.mutation.DocumentCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.DocumentTable,
			Columns: []string{attachment.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := auo.mutation.DocumentIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   attachment.DocumentTable,
			Columns: []string{attachment.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	_node = &Attachment{config: auo.config}
	_spec.Assign = _node.assignValues
	_spec.ScanValues = _node.scanValues
	if err = sqlgraph.UpdateNode(ctx, auo.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{attachment.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return nil, err
	}
	return _node, nil
}
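The update file mirrors the delete pair: AttachmentUpdate applies a mutation to every matching row, while AttachmentUpdateOne targets a single ID and returns the refreshed entity, with defaults() stamping updated_at and check() enforcing the enum validator and the two required edges. A sketch of the single-row path, not part of this commit; it assumes an existing *ent.Client and the helper name is hypothetical:

	package example

	import (
		"context"
		"fmt"

		"github.com/google/uuid"

		"github.com/hay-kot/content/backend/ent"
		"github.com/hay-kot/content/backend/ent/attachment"
	)

	// retypeAttachment changes the type of one attachment. updated_at is
	// set automatically by defaults(), and an invalid enum value surfaces
	// as a *ValidationError from check() before any SQL runs.
	func retypeAttachment(ctx context.Context, client *ent.Client, id uuid.UUID, t attachment.Type) (*ent.Attachment, error) {
		a, err := client.Attachment.UpdateOneID(id).
			SetType(t).
			Save(ctx)
		if err != nil {
			return nil, fmt.Errorf("retype attachment %s: %w", id, err)
		}
		return a, nil
	}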
|
@ -11,7 +11,10 @@ import (
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/hay-kot/content/backend/ent/migrate"
|
"github.com/hay-kot/content/backend/ent/migrate"
|
||||||
|
|
||||||
|
"github.com/hay-kot/content/backend/ent/attachment"
|
||||||
"github.com/hay-kot/content/backend/ent/authtokens"
|
"github.com/hay-kot/content/backend/ent/authtokens"
|
||||||
|
"github.com/hay-kot/content/backend/ent/document"
|
||||||
|
"github.com/hay-kot/content/backend/ent/documenttoken"
|
||||||
"github.com/hay-kot/content/backend/ent/group"
|
"github.com/hay-kot/content/backend/ent/group"
|
||||||
"github.com/hay-kot/content/backend/ent/item"
|
"github.com/hay-kot/content/backend/ent/item"
|
||||||
"github.com/hay-kot/content/backend/ent/itemfield"
|
"github.com/hay-kot/content/backend/ent/itemfield"
|
||||||
|
@ -29,8 +32,14 @@ type Client struct {
|
||||||
config
|
config
|
||||||
// Schema is the client for creating, migrating and dropping schema.
|
// Schema is the client for creating, migrating and dropping schema.
|
||||||
Schema *migrate.Schema
|
Schema *migrate.Schema
|
||||||
|
// Attachment is the client for interacting with the Attachment builders.
|
||||||
|
Attachment *AttachmentClient
|
||||||
// AuthTokens is the client for interacting with the AuthTokens builders.
|
// AuthTokens is the client for interacting with the AuthTokens builders.
|
||||||
AuthTokens *AuthTokensClient
|
AuthTokens *AuthTokensClient
|
||||||
|
// Document is the client for interacting with the Document builders.
|
||||||
|
Document *DocumentClient
|
||||||
|
// DocumentToken is the client for interacting with the DocumentToken builders.
|
||||||
|
DocumentToken *DocumentTokenClient
|
||||||
// Group is the client for interacting with the Group builders.
|
// Group is the client for interacting with the Group builders.
|
||||||
Group *GroupClient
|
Group *GroupClient
|
||||||
// Item is the client for interacting with the Item builders.
|
// Item is the client for interacting with the Item builders.
|
||||||
|
@ -56,7 +65,10 @@ func NewClient(opts ...Option) *Client {
|
||||||
|
|
||||||
func (c *Client) init() {
|
func (c *Client) init() {
|
||||||
c.Schema = migrate.NewSchema(c.driver)
|
c.Schema = migrate.NewSchema(c.driver)
|
||||||
|
c.Attachment = NewAttachmentClient(c.config)
|
||||||
c.AuthTokens = NewAuthTokensClient(c.config)
|
c.AuthTokens = NewAuthTokensClient(c.config)
|
||||||
|
c.Document = NewDocumentClient(c.config)
|
||||||
|
c.DocumentToken = NewDocumentTokenClient(c.config)
|
||||||
c.Group = NewGroupClient(c.config)
|
c.Group = NewGroupClient(c.config)
|
||||||
c.Item = NewItemClient(c.config)
|
c.Item = NewItemClient(c.config)
|
||||||
c.ItemField = NewItemFieldClient(c.config)
|
c.ItemField = NewItemFieldClient(c.config)
|
||||||
|
@ -94,15 +106,18 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
|
||||||
cfg := c.config
|
cfg := c.config
|
||||||
cfg.driver = tx
|
cfg.driver = tx
|
||||||
return &Tx{
|
return &Tx{
|
||||||
ctx: ctx,
|
ctx: ctx,
|
||||||
config: cfg,
|
config: cfg,
|
||||||
AuthTokens: NewAuthTokensClient(cfg),
|
Attachment: NewAttachmentClient(cfg),
|
||||||
Group: NewGroupClient(cfg),
|
AuthTokens: NewAuthTokensClient(cfg),
|
||||||
Item: NewItemClient(cfg),
|
Document: NewDocumentClient(cfg),
|
||||||
ItemField: NewItemFieldClient(cfg),
|
DocumentToken: NewDocumentTokenClient(cfg),
|
||||||
Label: NewLabelClient(cfg),
|
Group: NewGroupClient(cfg),
|
||||||
Location: NewLocationClient(cfg),
|
Item: NewItemClient(cfg),
|
||||||
User: NewUserClient(cfg),
|
ItemField: NewItemFieldClient(cfg),
|
||||||
|
Label: NewLabelClient(cfg),
|
||||||
|
Location: NewLocationClient(cfg),
|
||||||
|
User: NewUserClient(cfg),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -120,22 +135,25 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
|
||||||
cfg := c.config
|
cfg := c.config
|
||||||
cfg.driver = &txDriver{tx: tx, drv: c.driver}
|
cfg.driver = &txDriver{tx: tx, drv: c.driver}
|
||||||
return &Tx{
|
return &Tx{
|
||||||
ctx: ctx,
|
ctx: ctx,
|
||||||
config: cfg,
|
config: cfg,
|
||||||
AuthTokens: NewAuthTokensClient(cfg),
|
Attachment: NewAttachmentClient(cfg),
|
||||||
Group: NewGroupClient(cfg),
|
AuthTokens: NewAuthTokensClient(cfg),
|
||||||
Item: NewItemClient(cfg),
|
Document: NewDocumentClient(cfg),
|
||||||
ItemField: NewItemFieldClient(cfg),
|
DocumentToken: NewDocumentTokenClient(cfg),
|
||||||
Label: NewLabelClient(cfg),
|
Group: NewGroupClient(cfg),
|
||||||
Location: NewLocationClient(cfg),
|
Item: NewItemClient(cfg),
|
||||||
User: NewUserClient(cfg),
|
ItemField: NewItemFieldClient(cfg),
|
||||||
|
Label: NewLabelClient(cfg),
|
||||||
|
Location: NewLocationClient(cfg),
|
||||||
|
User: NewUserClient(cfg),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Debug returns a new debug-client. It's used to get verbose logging on specific operations.
|
// Debug returns a new debug-client. It's used to get verbose logging on specific operations.
|
||||||
//
|
//
|
||||||
// client.Debug().
|
// client.Debug().
|
||||||
// AuthTokens.
|
// Attachment.
|
||||||
// Query().
|
// Query().
|
||||||
// Count(ctx)
|
// Count(ctx)
|
||||||
func (c *Client) Debug() *Client {
|
func (c *Client) Debug() *Client {
|
||||||
|
@ -157,7 +175,10 @@ func (c *Client) Close() error {
|
||||||
// Use adds the mutation hooks to all the entity clients.
|
// Use adds the mutation hooks to all the entity clients.
|
||||||
// In order to add hooks to a specific client, call: `client.Node.Use(...)`.
|
// In order to add hooks to a specific client, call: `client.Node.Use(...)`.
|
||||||
func (c *Client) Use(hooks ...Hook) {
|
func (c *Client) Use(hooks ...Hook) {
|
||||||
|
c.Attachment.Use(hooks...)
|
||||||
c.AuthTokens.Use(hooks...)
|
c.AuthTokens.Use(hooks...)
|
||||||
|
c.Document.Use(hooks...)
|
||||||
|
c.DocumentToken.Use(hooks...)
|
||||||
c.Group.Use(hooks...)
|
c.Group.Use(hooks...)
|
||||||
c.Item.Use(hooks...)
|
c.Item.Use(hooks...)
|
||||||
c.ItemField.Use(hooks...)
|
c.ItemField.Use(hooks...)
|
||||||
|
@ -166,6 +187,128 @@ func (c *Client) Use(hooks ...Hook) {
|
||||||
c.User.Use(hooks...)
|
c.User.Use(hooks...)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AttachmentClient is a client for the Attachment schema.
|
||||||
|
type AttachmentClient struct {
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewAttachmentClient returns a client for the Attachment from the given config.
|
||||||
|
func NewAttachmentClient(c config) *AttachmentClient {
|
||||||
|
return &AttachmentClient{config: c}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use adds a list of mutation hooks to the hooks stack.
|
||||||
|
// A call to `Use(f, g, h)` equals to `attachment.Hooks(f(g(h())))`.
|
||||||
|
func (c *AttachmentClient) Use(hooks ...Hook) {
|
||||||
|
c.hooks.Attachment = append(c.hooks.Attachment, hooks...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create returns a builder for creating a Attachment entity.
|
||||||
|
func (c *AttachmentClient) Create() *AttachmentCreate {
|
||||||
|
mutation := newAttachmentMutation(c.config, OpCreate)
|
||||||
|
	return &AttachmentCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// CreateBulk returns a builder for creating a bulk of Attachment entities.
func (c *AttachmentClient) CreateBulk(builders ...*AttachmentCreate) *AttachmentCreateBulk {
	return &AttachmentCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Attachment.
func (c *AttachmentClient) Update() *AttachmentUpdate {
	mutation := newAttachmentMutation(c.config, OpUpdate)
	return &AttachmentUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOne returns an update builder for the given entity.
func (c *AttachmentClient) UpdateOne(a *Attachment) *AttachmentUpdateOne {
	mutation := newAttachmentMutation(c.config, OpUpdateOne, withAttachment(a))
	return &AttachmentUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOneID returns an update builder for the given id.
func (c *AttachmentClient) UpdateOneID(id uuid.UUID) *AttachmentUpdateOne {
	mutation := newAttachmentMutation(c.config, OpUpdateOne, withAttachmentID(id))
	return &AttachmentUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// Delete returns a delete builder for Attachment.
func (c *AttachmentClient) Delete() *AttachmentDelete {
	mutation := newAttachmentMutation(c.config, OpDelete)
	return &AttachmentDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// DeleteOne returns a builder for deleting the given entity.
func (c *AttachmentClient) DeleteOne(a *Attachment) *AttachmentDeleteOne {
	return c.DeleteOneID(a.ID)
}

// DeleteOne returns a builder for deleting the given entity by its id.
func (c *AttachmentClient) DeleteOneID(id uuid.UUID) *AttachmentDeleteOne {
	builder := c.Delete().Where(attachment.ID(id))
	builder.mutation.id = &id
	builder.mutation.op = OpDeleteOne
	return &AttachmentDeleteOne{builder}
}

// Query returns a query builder for Attachment.
func (c *AttachmentClient) Query() *AttachmentQuery {
	return &AttachmentQuery{
		config: c.config,
	}
}

// Get returns a Attachment entity by its id.
func (c *AttachmentClient) Get(ctx context.Context, id uuid.UUID) (*Attachment, error) {
	return c.Query().Where(attachment.ID(id)).Only(ctx)
}

// GetX is like Get, but panics if an error occurs.
func (c *AttachmentClient) GetX(ctx context.Context, id uuid.UUID) *Attachment {
	obj, err := c.Get(ctx, id)
	if err != nil {
		panic(err)
	}
	return obj
}

// QueryItem queries the item edge of a Attachment.
func (c *AttachmentClient) QueryItem(a *Attachment) *ItemQuery {
	query := &ItemQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := a.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(attachment.Table, attachment.FieldID, id),
			sqlgraph.To(item.Table, item.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, attachment.ItemTable, attachment.ItemColumn),
		)
		fromV = sqlgraph.Neighbors(a.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// QueryDocument queries the document edge of a Attachment.
func (c *AttachmentClient) QueryDocument(a *Attachment) *DocumentQuery {
	query := &DocumentQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := a.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(attachment.Table, attachment.FieldID, id),
			sqlgraph.To(document.Table, document.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, attachment.DocumentTable, attachment.DocumentColumn),
		)
		fromV = sqlgraph.Neighbors(a.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *AttachmentClient) Hooks() []Hook {
	return c.hooks.Attachment
}
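For orientation, a minimal sketch of how the generated AttachmentClient surface above is driven in practice. The SQLite DSN, the sqlite3 driver import, and the assumption that the schema has already been migrated are illustrative, not part of this commit:

package main

import (
	"context"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3" // assumed driver; the app may use another

	"github.com/hay-kot/content/backend/ent"
)

func main() {
	// Placeholder in-memory DSN for illustration only.
	client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
	ctx := context.Background()

	// Fetch any attachment, then walk its M2O edges with the new helpers.
	a, err := client.Attachment.Query().First(ctx)
	if err != nil {
		log.Fatal(err) // ent.IsNotFound(err) when no rows exist yet
	}
	item := client.Attachment.QueryItem(a).OnlyX(ctx)
	doc := client.Attachment.QueryDocument(a).OnlyX(ctx)
	fmt.Println(item.ID, doc.Title)
}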
// AuthTokensClient is a client for the AuthTokens schema.
type AuthTokensClient struct {
	config

@@ -272,6 +415,250 @@ func (c *AuthTokensClient) Hooks() []Hook {
	return c.hooks.AuthTokens
}

// DocumentClient is a client for the Document schema.
type DocumentClient struct {
	config
}

// NewDocumentClient returns a client for the Document from the given config.
func NewDocumentClient(c config) *DocumentClient {
	return &DocumentClient{config: c}
}

// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `document.Hooks(f(g(h())))`.
func (c *DocumentClient) Use(hooks ...Hook) {
	c.hooks.Document = append(c.hooks.Document, hooks...)
}

// Create returns a builder for creating a Document entity.
func (c *DocumentClient) Create() *DocumentCreate {
	mutation := newDocumentMutation(c.config, OpCreate)
	return &DocumentCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// CreateBulk returns a builder for creating a bulk of Document entities.
func (c *DocumentClient) CreateBulk(builders ...*DocumentCreate) *DocumentCreateBulk {
	return &DocumentCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Document.
func (c *DocumentClient) Update() *DocumentUpdate {
	mutation := newDocumentMutation(c.config, OpUpdate)
	return &DocumentUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOne returns an update builder for the given entity.
func (c *DocumentClient) UpdateOne(d *Document) *DocumentUpdateOne {
	mutation := newDocumentMutation(c.config, OpUpdateOne, withDocument(d))
	return &DocumentUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOneID returns an update builder for the given id.
func (c *DocumentClient) UpdateOneID(id uuid.UUID) *DocumentUpdateOne {
	mutation := newDocumentMutation(c.config, OpUpdateOne, withDocumentID(id))
	return &DocumentUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// Delete returns a delete builder for Document.
func (c *DocumentClient) Delete() *DocumentDelete {
	mutation := newDocumentMutation(c.config, OpDelete)
	return &DocumentDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// DeleteOne returns a builder for deleting the given entity.
func (c *DocumentClient) DeleteOne(d *Document) *DocumentDeleteOne {
	return c.DeleteOneID(d.ID)
}

// DeleteOne returns a builder for deleting the given entity by its id.
func (c *DocumentClient) DeleteOneID(id uuid.UUID) *DocumentDeleteOne {
	builder := c.Delete().Where(document.ID(id))
	builder.mutation.id = &id
	builder.mutation.op = OpDeleteOne
	return &DocumentDeleteOne{builder}
}

// Query returns a query builder for Document.
func (c *DocumentClient) Query() *DocumentQuery {
	return &DocumentQuery{
		config: c.config,
	}
}

// Get returns a Document entity by its id.
func (c *DocumentClient) Get(ctx context.Context, id uuid.UUID) (*Document, error) {
	return c.Query().Where(document.ID(id)).Only(ctx)
}

// GetX is like Get, but panics if an error occurs.
func (c *DocumentClient) GetX(ctx context.Context, id uuid.UUID) *Document {
	obj, err := c.Get(ctx, id)
	if err != nil {
		panic(err)
	}
	return obj
}

// QueryGroup queries the group edge of a Document.
func (c *DocumentClient) QueryGroup(d *Document) *GroupQuery {
	query := &GroupQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := d.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(document.Table, document.FieldID, id),
			sqlgraph.To(group.Table, group.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, document.GroupTable, document.GroupColumn),
		)
		fromV = sqlgraph.Neighbors(d.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// QueryDocumentTokens queries the document_tokens edge of a Document.
func (c *DocumentClient) QueryDocumentTokens(d *Document) *DocumentTokenQuery {
	query := &DocumentTokenQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := d.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(document.Table, document.FieldID, id),
			sqlgraph.To(documenttoken.Table, documenttoken.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, document.DocumentTokensTable, document.DocumentTokensColumn),
		)
		fromV = sqlgraph.Neighbors(d.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// QueryAttachments queries the attachments edge of a Document.
func (c *DocumentClient) QueryAttachments(d *Document) *AttachmentQuery {
	query := &AttachmentQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := d.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(document.Table, document.FieldID, id),
			sqlgraph.To(attachment.Table, attachment.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, document.AttachmentsTable, document.AttachmentsColumn),
		)
		fromV = sqlgraph.Neighbors(d.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *DocumentClient) Hooks() []Hook {
	return c.hooks.Document
}

// DocumentTokenClient is a client for the DocumentToken schema.
type DocumentTokenClient struct {
	config
}

// NewDocumentTokenClient returns a client for the DocumentToken from the given config.
func NewDocumentTokenClient(c config) *DocumentTokenClient {
	return &DocumentTokenClient{config: c}
}

// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `documenttoken.Hooks(f(g(h())))`.
func (c *DocumentTokenClient) Use(hooks ...Hook) {
	c.hooks.DocumentToken = append(c.hooks.DocumentToken, hooks...)
}

// Create returns a builder for creating a DocumentToken entity.
func (c *DocumentTokenClient) Create() *DocumentTokenCreate {
	mutation := newDocumentTokenMutation(c.config, OpCreate)
	return &DocumentTokenCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// CreateBulk returns a builder for creating a bulk of DocumentToken entities.
func (c *DocumentTokenClient) CreateBulk(builders ...*DocumentTokenCreate) *DocumentTokenCreateBulk {
	return &DocumentTokenCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for DocumentToken.
func (c *DocumentTokenClient) Update() *DocumentTokenUpdate {
	mutation := newDocumentTokenMutation(c.config, OpUpdate)
	return &DocumentTokenUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOne returns an update builder for the given entity.
func (c *DocumentTokenClient) UpdateOne(dt *DocumentToken) *DocumentTokenUpdateOne {
	mutation := newDocumentTokenMutation(c.config, OpUpdateOne, withDocumentToken(dt))
	return &DocumentTokenUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// UpdateOneID returns an update builder for the given id.
func (c *DocumentTokenClient) UpdateOneID(id uuid.UUID) *DocumentTokenUpdateOne {
	mutation := newDocumentTokenMutation(c.config, OpUpdateOne, withDocumentTokenID(id))
	return &DocumentTokenUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// Delete returns a delete builder for DocumentToken.
func (c *DocumentTokenClient) Delete() *DocumentTokenDelete {
	mutation := newDocumentTokenMutation(c.config, OpDelete)
	return &DocumentTokenDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}

// DeleteOne returns a builder for deleting the given entity.
func (c *DocumentTokenClient) DeleteOne(dt *DocumentToken) *DocumentTokenDeleteOne {
	return c.DeleteOneID(dt.ID)
}

// DeleteOne returns a builder for deleting the given entity by its id.
func (c *DocumentTokenClient) DeleteOneID(id uuid.UUID) *DocumentTokenDeleteOne {
	builder := c.Delete().Where(documenttoken.ID(id))
	builder.mutation.id = &id
	builder.mutation.op = OpDeleteOne
	return &DocumentTokenDeleteOne{builder}
}

// Query returns a query builder for DocumentToken.
func (c *DocumentTokenClient) Query() *DocumentTokenQuery {
	return &DocumentTokenQuery{
		config: c.config,
	}
}

// Get returns a DocumentToken entity by its id.
func (c *DocumentTokenClient) Get(ctx context.Context, id uuid.UUID) (*DocumentToken, error) {
	return c.Query().Where(documenttoken.ID(id)).Only(ctx)
}

// GetX is like Get, but panics if an error occurs.
func (c *DocumentTokenClient) GetX(ctx context.Context, id uuid.UUID) *DocumentToken {
	obj, err := c.Get(ctx, id)
	if err != nil {
		panic(err)
	}
	return obj
}

// QueryDocument queries the document edge of a DocumentToken.
func (c *DocumentTokenClient) QueryDocument(dt *DocumentToken) *DocumentQuery {
	query := &DocumentQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := dt.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(documenttoken.Table, documenttoken.FieldID, id),
			sqlgraph.To(document.Table, document.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, documenttoken.DocumentTable, documenttoken.DocumentColumn),
		)
		fromV = sqlgraph.Neighbors(dt.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *DocumentTokenClient) Hooks() []Hook {
	return c.hooks.DocumentToken
}

// GroupClient is a client for the Group schema.
type GroupClient struct {
	config

@@ -421,6 +808,22 @@ func (c *GroupClient) QueryLabels(gr *Group) *LabelQuery {
	return query
}

// QueryDocuments queries the documents edge of a Group.
func (c *GroupClient) QueryDocuments(gr *Group) *DocumentQuery {
	query := &DocumentQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := gr.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(group.Table, group.FieldID, id),
			sqlgraph.To(document.Table, document.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, group.DocumentsTable, group.DocumentsColumn),
		)
		fromV = sqlgraph.Neighbors(gr.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *GroupClient) Hooks() []Hook {
	return c.hooks.Group

@@ -575,6 +978,22 @@ func (c *ItemClient) QueryLabel(i *Item) *LabelQuery {
	return query
}

// QueryAttachments queries the attachments edge of a Item.
func (c *ItemClient) QueryAttachments(i *Item) *AttachmentQuery {
	query := &AttachmentQuery{config: c.config}
	query.path = func(ctx context.Context) (fromV *sql.Selector, _ error) {
		id := i.ID
		step := sqlgraph.NewStep(
			sqlgraph.From(item.Table, item.FieldID, id),
			sqlgraph.To(attachment.Table, attachment.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, item.AttachmentsTable, item.AttachmentsColumn),
		)
		fromV = sqlgraph.Neighbors(i.driver.Dialect(), step)
		return fromV, nil
	}
	return query
}

// Hooks returns the client hooks.
func (c *ItemClient) Hooks() []Hook {
	return c.hooks.Item

@@ -24,13 +24,16 @@ type config struct {

// hooks per client, for fast access.
type hooks struct {
	Attachment    []ent.Hook
	AuthTokens    []ent.Hook
	Document      []ent.Hook
	DocumentToken []ent.Hook
	Group         []ent.Hook
	Item          []ent.Hook
	ItemField     []ent.Hook
	Label         []ent.Hook
	Location      []ent.Hook
	User          []ent.Hook
}

// Options applies the options on the config object.
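The Use methods above push ent mutation hooks onto the per-client stacks declared in the hooks struct. A hedged sketch of registering one; the registerHooks helper and its logging body are illustrative, only the Use/MutateFunc API comes from this code:

package main

import (
	"context"
	"log"

	"github.com/hay-kot/content/backend/ent"
)

// registerHooks wires a logging hook into the generated Document client.
// ent.Hook, ent.Mutator, and ent.MutateFunc are the aliases the generated
// package exposes for entgo.io/ent's hook types.
func registerHooks(client *ent.Client) {
	client.Document.Use(func(next ent.Mutator) ent.Mutator {
		return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
			log.Printf("document mutation: op=%v", m.Op())
			return next.Mutate(ctx, m) // continue down the hook chain
		})
	})
}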
209
backend/ent/document.go
Normal file
@@ -0,0 +1,209 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"fmt"
	"strings"
	"time"

	"entgo.io/ent/dialect/sql"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/group"
)

// Document is the model entity for the Document schema.
type Document struct {
	config `json:"-"`
	// ID of the ent.
	ID uuid.UUID `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// Title holds the value of the "title" field.
	Title string `json:"title,omitempty"`
	// Path holds the value of the "path" field.
	Path string `json:"path,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the DocumentQuery when eager-loading is set.
	Edges           DocumentEdges `json:"edges"`
	group_documents *uuid.UUID
}

// DocumentEdges holds the relations/edges for other nodes in the graph.
type DocumentEdges struct {
	// Group holds the value of the group edge.
	Group *Group `json:"group,omitempty"`
	// DocumentTokens holds the value of the document_tokens edge.
	DocumentTokens []*DocumentToken `json:"document_tokens,omitempty"`
	// Attachments holds the value of the attachments edge.
	Attachments []*Attachment `json:"attachments,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	loadedTypes [3]bool
}

// GroupOrErr returns the Group value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e DocumentEdges) GroupOrErr() (*Group, error) {
	if e.loadedTypes[0] {
		if e.Group == nil {
			// Edge was loaded but was not found.
			return nil, &NotFoundError{label: group.Label}
		}
		return e.Group, nil
	}
	return nil, &NotLoadedError{edge: "group"}
}

// DocumentTokensOrErr returns the DocumentTokens value or an error if the edge
// was not loaded in eager-loading.
func (e DocumentEdges) DocumentTokensOrErr() ([]*DocumentToken, error) {
	if e.loadedTypes[1] {
		return e.DocumentTokens, nil
	}
	return nil, &NotLoadedError{edge: "document_tokens"}
}

// AttachmentsOrErr returns the Attachments value or an error if the edge
// was not loaded in eager-loading.
func (e DocumentEdges) AttachmentsOrErr() ([]*Attachment, error) {
	if e.loadedTypes[2] {
		return e.Attachments, nil
	}
	return nil, &NotLoadedError{edge: "attachments"}
}

// scanValues returns the types for scanning values from sql.Rows.
func (*Document) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case document.FieldTitle, document.FieldPath:
			values[i] = new(sql.NullString)
		case document.FieldCreatedAt, document.FieldUpdatedAt:
			values[i] = new(sql.NullTime)
		case document.FieldID:
			values[i] = new(uuid.UUID)
		case document.ForeignKeys[0]: // group_documents
			values[i] = &sql.NullScanner{S: new(uuid.UUID)}
		default:
			return nil, fmt.Errorf("unexpected column %q for type Document", columns[i])
		}
	}
	return values, nil
}

// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Document fields.
func (d *Document) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case document.FieldID:
			if value, ok := values[i].(*uuid.UUID); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value != nil {
				d.ID = *value
			}
		case document.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				d.CreatedAt = value.Time
			}
		case document.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				d.UpdatedAt = value.Time
			}
		case document.FieldTitle:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field title", values[i])
			} else if value.Valid {
				d.Title = value.String
			}
		case document.FieldPath:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field path", values[i])
			} else if value.Valid {
				d.Path = value.String
			}
		case document.ForeignKeys[0]:
			if value, ok := values[i].(*sql.NullScanner); !ok {
				return fmt.Errorf("unexpected type %T for field group_documents", values[i])
			} else if value.Valid {
				d.group_documents = new(uuid.UUID)
				*d.group_documents = *value.S.(*uuid.UUID)
			}
		}
	}
	return nil
}

// QueryGroup queries the "group" edge of the Document entity.
func (d *Document) QueryGroup() *GroupQuery {
	return (&DocumentClient{config: d.config}).QueryGroup(d)
}

// QueryDocumentTokens queries the "document_tokens" edge of the Document entity.
func (d *Document) QueryDocumentTokens() *DocumentTokenQuery {
	return (&DocumentClient{config: d.config}).QueryDocumentTokens(d)
}

// QueryAttachments queries the "attachments" edge of the Document entity.
func (d *Document) QueryAttachments() *AttachmentQuery {
	return (&DocumentClient{config: d.config}).QueryAttachments(d)
}

// Update returns a builder for updating this Document.
// Note that you need to call Document.Unwrap() before calling this method if this Document
// was returned from a transaction, and the transaction was committed or rolled back.
func (d *Document) Update() *DocumentUpdateOne {
	return (&DocumentClient{config: d.config}).UpdateOne(d)
}

// Unwrap unwraps the Document entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (d *Document) Unwrap() *Document {
	_tx, ok := d.config.driver.(*txDriver)
	if !ok {
		panic("ent: Document is not a transactional entity")
	}
	d.config.driver = _tx.drv
	return d
}

// String implements the fmt.Stringer.
func (d *Document) String() string {
	var builder strings.Builder
	builder.WriteString("Document(")
	builder.WriteString(fmt.Sprintf("id=%v, ", d.ID))
	builder.WriteString("created_at=")
	builder.WriteString(d.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("updated_at=")
	builder.WriteString(d.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("title=")
	builder.WriteString(d.Title)
	builder.WriteString(", ")
	builder.WriteString("path=")
	builder.WriteString(d.Path)
	builder.WriteByte(')')
	return builder.String()
}

// Documents is a parsable slice of Document.
type Documents []*Document

func (d Documents) config(cfg config) {
	for _i := range d {
		d[_i].config = cfg
	}
}
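The *OrErr accessors above only return edge data when the query eager-loaded that edge. A sketch of the contract, assuming the standard ent-generated WithGroup and FirstX helpers exist alongside this file (they are not shown in this diff):

package main

import (
	"context"
	"fmt"

	"github.com/hay-kot/content/backend/ent"
)

// showGroup demonstrates the eager-loading contract: GroupOrErr succeeds
// only because WithGroup() requested the edge during the query.
func showGroup(ctx context.Context, client *ent.Client) {
	doc := client.Document.Query().
		WithGroup(). // populate Edges.Group while loading the document
		FirstX(ctx)
	if g, err := doc.Edges.GroupOrErr(); err == nil {
		fmt.Println("owning group:", g.ID)
	}
}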
98
backend/ent/document/document.go
Normal file
@@ -0,0 +1,98 @@
// Code generated by ent, DO NOT EDIT.

package document

import (
	"time"

	"github.com/google/uuid"
)

const (
	// Label holds the string label denoting the document type in the database.
	Label = "document"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
	FieldUpdatedAt = "updated_at"
	// FieldTitle holds the string denoting the title field in the database.
	FieldTitle = "title"
	// FieldPath holds the string denoting the path field in the database.
	FieldPath = "path"
	// EdgeGroup holds the string denoting the group edge name in mutations.
	EdgeGroup = "group"
	// EdgeDocumentTokens holds the string denoting the document_tokens edge name in mutations.
	EdgeDocumentTokens = "document_tokens"
	// EdgeAttachments holds the string denoting the attachments edge name in mutations.
	EdgeAttachments = "attachments"
	// Table holds the table name of the document in the database.
	Table = "documents"
	// GroupTable is the table that holds the group relation/edge.
	GroupTable = "documents"
	// GroupInverseTable is the table name for the Group entity.
	// It exists in this package in order to avoid circular dependency with the "group" package.
	GroupInverseTable = "groups"
	// GroupColumn is the table column denoting the group relation/edge.
	GroupColumn = "group_documents"
	// DocumentTokensTable is the table that holds the document_tokens relation/edge.
	DocumentTokensTable = "document_tokens"
	// DocumentTokensInverseTable is the table name for the DocumentToken entity.
	// It exists in this package in order to avoid circular dependency with the "documenttoken" package.
	DocumentTokensInverseTable = "document_tokens"
	// DocumentTokensColumn is the table column denoting the document_tokens relation/edge.
	DocumentTokensColumn = "document_document_tokens"
	// AttachmentsTable is the table that holds the attachments relation/edge.
	AttachmentsTable = "attachments"
	// AttachmentsInverseTable is the table name for the Attachment entity.
	// It exists in this package in order to avoid circular dependency with the "attachment" package.
	AttachmentsInverseTable = "attachments"
	// AttachmentsColumn is the table column denoting the attachments relation/edge.
	AttachmentsColumn = "document_attachments"
)

// Columns holds all SQL columns for document fields.
var Columns = []string{
	FieldID,
	FieldCreatedAt,
	FieldUpdatedAt,
	FieldTitle,
	FieldPath,
}

// ForeignKeys holds the SQL foreign-keys that are owned by the "documents"
// table and are not defined as standalone fields in the schema.
var ForeignKeys = []string{
	"group_documents",
}

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	for i := range ForeignKeys {
		if column == ForeignKeys[i] {
			return true
		}
	}
	return false
}

var (
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
	DefaultUpdatedAt func() time.Time
	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
	UpdateDefaultUpdatedAt func() time.Time
	// TitleValidator is a validator for the "title" field. It is called by the builders before save.
	TitleValidator func(string) error
	// PathValidator is a validator for the "path" field. It is called by the builders before save.
	PathValidator func(string) error
	// DefaultID holds the default value on creation for the "id" field.
	DefaultID func() uuid.UUID
)
553
backend/ent/document/where.go
Normal file
@@ -0,0 +1,553 @@
// Code generated by ent, DO NOT EDIT.

package document

import (
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// ID filters vertices based on their ID field.
func ID(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldID), id))
	})
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.In(s.C(FieldID), v...))
	})
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.NotIn(s.C(FieldID), v...))
	})
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldID), id))
	})
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldID), id))
	})
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldID), id))
	})
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id uuid.UUID) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldID), id))
	})
}

// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// Title applies equality check predicate on the "title" field. It's identical to TitleEQ.
func Title(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTitle), v))
	})
}

// Path applies equality check predicate on the "path" field. It's identical to PathEQ.
func Path(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldPath), v))
	})
}

// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
	})
}

// TitleEQ applies the EQ predicate on the "title" field.
func TitleEQ(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldTitle), v))
	})
}

// TitleNEQ applies the NEQ predicate on the "title" field.
func TitleNEQ(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldTitle), v))
	})
}

// TitleIn applies the In predicate on the "title" field.
func TitleIn(vs ...string) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldTitle), v...))
	})
}

// TitleNotIn applies the NotIn predicate on the "title" field.
func TitleNotIn(vs ...string) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldTitle), v...))
	})
}

// TitleGT applies the GT predicate on the "title" field.
func TitleGT(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldTitle), v))
	})
}

// TitleGTE applies the GTE predicate on the "title" field.
func TitleGTE(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldTitle), v))
	})
}

// TitleLT applies the LT predicate on the "title" field.
func TitleLT(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldTitle), v))
	})
}

// TitleLTE applies the LTE predicate on the "title" field.
func TitleLTE(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldTitle), v))
	})
}

// TitleContains applies the Contains predicate on the "title" field.
func TitleContains(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldTitle), v))
	})
}

// TitleHasPrefix applies the HasPrefix predicate on the "title" field.
func TitleHasPrefix(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldTitle), v))
	})
}

// TitleHasSuffix applies the HasSuffix predicate on the "title" field.
func TitleHasSuffix(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldTitle), v))
	})
}

// TitleEqualFold applies the EqualFold predicate on the "title" field.
func TitleEqualFold(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldTitle), v))
	})
}

// TitleContainsFold applies the ContainsFold predicate on the "title" field.
func TitleContainsFold(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.ContainsFold(s.C(FieldTitle), v))
	})
}

// PathEQ applies the EQ predicate on the "path" field.
func PathEQ(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldPath), v))
	})
}

// PathNEQ applies the NEQ predicate on the "path" field.
func PathNEQ(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldPath), v))
	})
}

// PathIn applies the In predicate on the "path" field.
func PathIn(vs ...string) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldPath), v...))
	})
}

// PathNotIn applies the NotIn predicate on the "path" field.
func PathNotIn(vs ...string) predicate.Document {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldPath), v...))
	})
}

// PathGT applies the GT predicate on the "path" field.
func PathGT(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldPath), v))
	})
}

// PathGTE applies the GTE predicate on the "path" field.
func PathGTE(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldPath), v))
	})
}

// PathLT applies the LT predicate on the "path" field.
func PathLT(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldPath), v))
	})
}

// PathLTE applies the LTE predicate on the "path" field.
func PathLTE(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldPath), v))
	})
}

// PathContains applies the Contains predicate on the "path" field.
func PathContains(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.Contains(s.C(FieldPath), v))
	})
}

// PathHasPrefix applies the HasPrefix predicate on the "path" field.
func PathHasPrefix(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.HasPrefix(s.C(FieldPath), v))
	})
}

// PathHasSuffix applies the HasSuffix predicate on the "path" field.
func PathHasSuffix(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.HasSuffix(s.C(FieldPath), v))
	})
}

// PathEqualFold applies the EqualFold predicate on the "path" field.
func PathEqualFold(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.EqualFold(s.C(FieldPath), v))
	})
}

// PathContainsFold applies the ContainsFold predicate on the "path" field.
func PathContainsFold(v string) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s.Where(sql.ContainsFold(s.C(FieldPath), v))
	})
}

// HasGroup applies the HasEdge predicate on the "group" edge.
func HasGroup() predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(GroupTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn),
		)
		sqlgraph.HasNeighbors(s, step)
	})
}

// HasGroupWith applies the HasEdge predicate on the "group" edge with a given conditions (other predicates).
func HasGroupWith(preds ...predicate.Group) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(GroupInverseTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn),
		)
		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
			for _, p := range preds {
				p(s)
			}
		})
	})
}

// HasDocumentTokens applies the HasEdge predicate on the "document_tokens" edge.
func HasDocumentTokens() predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(DocumentTokensTable, FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, DocumentTokensTable, DocumentTokensColumn),
		)
		sqlgraph.HasNeighbors(s, step)
	})
}

// HasDocumentTokensWith applies the HasEdge predicate on the "document_tokens" edge with a given conditions (other predicates).
func HasDocumentTokensWith(preds ...predicate.DocumentToken) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(DocumentTokensInverseTable, FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, DocumentTokensTable, DocumentTokensColumn),
		)
		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
			for _, p := range preds {
				p(s)
			}
		})
	})
}

// HasAttachments applies the HasEdge predicate on the "attachments" edge.
func HasAttachments() predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(AttachmentsTable, FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, AttachmentsTable, AttachmentsColumn),
		)
		sqlgraph.HasNeighbors(s, step)
	})
}

// HasAttachmentsWith applies the HasEdge predicate on the "attachments" edge with a given conditions (other predicates).
func HasAttachmentsWith(preds ...predicate.Attachment) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(AttachmentsInverseTable, FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, AttachmentsTable, AttachmentsColumn),
		)
		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
			for _, p := range preds {
				p(s)
			}
		})
	})
}

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Document) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s1 := s.Clone().SetP(nil)
		for _, p := range predicates {
			p(s1)
		}
		s.Where(s1.P())
	})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Document) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		s1 := s.Clone().SetP(nil)
		for i, p := range predicates {
			if i > 0 {
				s1.Or()
			}
			p(s1)
		}
		s.Where(s1.P())
	})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Document) predicate.Document {
	return predicate.Document(func(s *sql.Selector) {
		p(s.Not())
	})
}
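These generated predicates compose through And/Or/Not and the query builder's variadic Where. A sketch of that composition; the search terms are placeholders:

package main

import (
	"context"

	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/document"
)

// findReceipts shows predicate composition: an OR of two field predicates,
// ANDed (by Where's variadic semantics) with an edge predicate.
func findReceipts(ctx context.Context, client *ent.Client) ([]*ent.Document, error) {
	return client.Document.Query().
		Where(
			document.Or(
				document.TitleContainsFold("receipt"),
				document.PathHasSuffix(".pdf"),
			),
			document.HasGroup(), // only documents owned by a group
		).
		All(ctx)
}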
447
backend/ent/document_create.go
Normal file
@@ -0,0 +1,447 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"errors"
	"fmt"
	"time"

	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/group"
)

// DocumentCreate is the builder for creating a Document entity.
type DocumentCreate struct {
	config
	mutation *DocumentMutation
	hooks    []Hook
}

// SetCreatedAt sets the "created_at" field.
func (dc *DocumentCreate) SetCreatedAt(t time.Time) *DocumentCreate {
	dc.mutation.SetCreatedAt(t)
	return dc
}

// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
func (dc *DocumentCreate) SetNillableCreatedAt(t *time.Time) *DocumentCreate {
	if t != nil {
		dc.SetCreatedAt(*t)
	}
	return dc
}

// SetUpdatedAt sets the "updated_at" field.
func (dc *DocumentCreate) SetUpdatedAt(t time.Time) *DocumentCreate {
	dc.mutation.SetUpdatedAt(t)
	return dc
}

// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
func (dc *DocumentCreate) SetNillableUpdatedAt(t *time.Time) *DocumentCreate {
	if t != nil {
		dc.SetUpdatedAt(*t)
	}
	return dc
}

// SetTitle sets the "title" field.
func (dc *DocumentCreate) SetTitle(s string) *DocumentCreate {
	dc.mutation.SetTitle(s)
	return dc
}

// SetPath sets the "path" field.
func (dc *DocumentCreate) SetPath(s string) *DocumentCreate {
	dc.mutation.SetPath(s)
	return dc
}

// SetID sets the "id" field.
func (dc *DocumentCreate) SetID(u uuid.UUID) *DocumentCreate {
	dc.mutation.SetID(u)
	return dc
}

// SetNillableID sets the "id" field if the given value is not nil.
func (dc *DocumentCreate) SetNillableID(u *uuid.UUID) *DocumentCreate {
	if u != nil {
		dc.SetID(*u)
	}
	return dc
}

// SetGroupID sets the "group" edge to the Group entity by ID.
func (dc *DocumentCreate) SetGroupID(id uuid.UUID) *DocumentCreate {
	dc.mutation.SetGroupID(id)
	return dc
}

// SetGroup sets the "group" edge to the Group entity.
func (dc *DocumentCreate) SetGroup(g *Group) *DocumentCreate {
	return dc.SetGroupID(g.ID)
}

// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by IDs.
func (dc *DocumentCreate) AddDocumentTokenIDs(ids ...uuid.UUID) *DocumentCreate {
	dc.mutation.AddDocumentTokenIDs(ids...)
	return dc
}

// AddDocumentTokens adds the "document_tokens" edges to the DocumentToken entity.
func (dc *DocumentCreate) AddDocumentTokens(d ...*DocumentToken) *DocumentCreate {
	ids := make([]uuid.UUID, len(d))
	for i := range d {
		ids[i] = d[i].ID
	}
	return dc.AddDocumentTokenIDs(ids...)
}

// AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
func (dc *DocumentCreate) AddAttachmentIDs(ids ...uuid.UUID) *DocumentCreate {
	dc.mutation.AddAttachmentIDs(ids...)
	return dc
}

// AddAttachments adds the "attachments" edges to the Attachment entity.
func (dc *DocumentCreate) AddAttachments(a ...*Attachment) *DocumentCreate {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return dc.AddAttachmentIDs(ids...)
}

// Mutation returns the DocumentMutation object of the builder.
func (dc *DocumentCreate) Mutation() *DocumentMutation {
	return dc.mutation
}

// Save creates the Document in the database.
func (dc *DocumentCreate) Save(ctx context.Context) (*Document, error) {
	var (
		err  error
		node *Document
	)
	dc.defaults()
	if len(dc.hooks) == 0 {
		if err = dc.check(); err != nil {
			return nil, err
		}
		node, err = dc.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = dc.check(); err != nil {
				return nil, err
			}
			dc.mutation = mutation
			if node, err = dc.sqlSave(ctx); err != nil {
				return nil, err
			}
			mutation.id = &node.ID
			mutation.done = true
			return node, err
		})
		for i := len(dc.hooks) - 1; i >= 0; i-- {
			if dc.hooks[i] == nil {
				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = dc.hooks[i](mut)
		}
		v, err := mut.Mutate(ctx, dc.mutation)
		if err != nil {
			return nil, err
		}
		nv, ok := v.(*Document)
		if !ok {
			return nil, fmt.Errorf("unexpected node type %T returned from DocumentMutation", v)
		}
		node = nv
	}
	return node, err
}

// SaveX calls Save and panics if Save returns an error.
func (dc *DocumentCreate) SaveX(ctx context.Context) *Document {
	v, err := dc.Save(ctx)
	if err != nil {
		panic(err)
	}
	return v
}

// Exec executes the query.
func (dc *DocumentCreate) Exec(ctx context.Context) error {
	_, err := dc.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (dc *DocumentCreate) ExecX(ctx context.Context) {
	if err := dc.Exec(ctx); err != nil {
		panic(err)
	}
}

// defaults sets the default values of the builder before save.
func (dc *DocumentCreate) defaults() {
	if _, ok := dc.mutation.CreatedAt(); !ok {
		v := document.DefaultCreatedAt()
		dc.mutation.SetCreatedAt(v)
	}
	if _, ok := dc.mutation.UpdatedAt(); !ok {
		v := document.DefaultUpdatedAt()
		dc.mutation.SetUpdatedAt(v)
	}
	if _, ok := dc.mutation.ID(); !ok {
		v := document.DefaultID()
		dc.mutation.SetID(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (dc *DocumentCreate) check() error {
	if _, ok := dc.mutation.CreatedAt(); !ok {
		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Document.created_at"`)}
	}
	if _, ok := dc.mutation.UpdatedAt(); !ok {
		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Document.updated_at"`)}
	}
	if _, ok := dc.mutation.Title(); !ok {
		return &ValidationError{Name: "title", err: errors.New(`ent: missing required field "Document.title"`)}
|
||||||
|
}
|
||||||
|
if v, ok := dc.mutation.Title(); ok {
|
||||||
|
if err := document.TitleValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "title", err: fmt.Errorf(`ent: validator failed for field "Document.title": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _, ok := dc.mutation.Path(); !ok {
|
||||||
|
return &ValidationError{Name: "path", err: errors.New(`ent: missing required field "Document.path"`)}
|
||||||
|
}
|
||||||
|
if v, ok := dc.mutation.Path(); ok {
|
||||||
|
if err := document.PathValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "path", err: fmt.Errorf(`ent: validator failed for field "Document.path": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _, ok := dc.mutation.GroupID(); !ok {
|
||||||
|
return &ValidationError{Name: "group", err: errors.New(`ent: missing required edge "Document.group"`)}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (dc *DocumentCreate) sqlSave(ctx context.Context) (*Document, error) {
|
||||||
|
_node, _spec := dc.createSpec()
|
||||||
|
if err := sqlgraph.CreateNode(ctx, dc.driver, _spec); err != nil {
|
||||||
|
if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if _spec.ID.Value != nil {
|
||||||
|
if id, ok := _spec.ID.Value.(*uuid.UUID); ok {
|
||||||
|
_node.ID = *id
|
||||||
|
} else if err := _node.ID.Scan(_spec.ID.Value); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return _node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (dc *DocumentCreate) createSpec() (*Document, *sqlgraph.CreateSpec) {
|
||||||
|
var (
|
||||||
|
_node = &Document{config: dc.config}
|
||||||
|
_spec = &sqlgraph.CreateSpec{
|
||||||
|
Table: document.Table,
|
||||||
|
ID: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: document.FieldID,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if id, ok := dc.mutation.ID(); ok {
|
||||||
|
_node.ID = id
|
||||||
|
_spec.ID.Value = &id
|
||||||
|
}
|
||||||
|
if value, ok := dc.mutation.CreatedAt(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldCreatedAt,
|
||||||
|
})
|
||||||
|
_node.CreatedAt = value
|
||||||
|
}
|
||||||
|
if value, ok := dc.mutation.UpdatedAt(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldUpdatedAt,
|
||||||
|
})
|
||||||
|
_node.UpdatedAt = value
|
||||||
|
}
|
||||||
|
if value, ok := dc.mutation.Title(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeString,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldTitle,
|
||||||
|
})
|
||||||
|
_node.Title = value
|
||||||
|
}
|
||||||
|
if value, ok := dc.mutation.Path(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeString,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldPath,
|
||||||
|
})
|
||||||
|
_node.Path = value
|
||||||
|
}
|
||||||
|
if nodes := dc.mutation.GroupIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.M2O,
|
||||||
|
Inverse: true,
|
||||||
|
Table: document.GroupTable,
|
||||||
|
Columns: []string{document.GroupColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: group.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_node.group_documents = &nodes[0]
|
||||||
|
_spec.Edges = append(_spec.Edges, edge)
|
||||||
|
}
|
||||||
|
if nodes := dc.mutation.DocumentTokensIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.DocumentTokensTable,
|
||||||
|
Columns: []string{document.DocumentTokensColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: documenttoken.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges = append(_spec.Edges, edge)
|
||||||
|
}
|
||||||
|
if nodes := dc.mutation.AttachmentsIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.AttachmentsTable,
|
||||||
|
Columns: []string{document.AttachmentsColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: attachment.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges = append(_spec.Edges, edge)
|
||||||
|
}
|
||||||
|
return _node, _spec
|
||||||
|
}
|
||||||
|
|
||||||
|
// DocumentCreateBulk is the builder for creating many Document entities in bulk.
|
||||||
|
type DocumentCreateBulk struct {
|
||||||
|
config
|
||||||
|
builders []*DocumentCreate
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save creates the Document entities in the database.
|
||||||
|
func (dcb *DocumentCreateBulk) Save(ctx context.Context) ([]*Document, error) {
|
||||||
|
specs := make([]*sqlgraph.CreateSpec, len(dcb.builders))
|
||||||
|
nodes := make([]*Document, len(dcb.builders))
|
||||||
|
mutators := make([]Mutator, len(dcb.builders))
|
||||||
|
for i := range dcb.builders {
|
||||||
|
func(i int, root context.Context) {
|
||||||
|
builder := dcb.builders[i]
|
||||||
|
builder.defaults()
|
||||||
|
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
|
||||||
|
mutation, ok := m.(*DocumentMutation)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T", m)
|
||||||
|
}
|
||||||
|
if err := builder.check(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
builder.mutation = mutation
|
||||||
|
nodes[i], specs[i] = builder.createSpec()
|
||||||
|
var err error
|
||||||
|
if i < len(mutators)-1 {
|
||||||
|
_, err = mutators[i+1].Mutate(root, dcb.builders[i+1].mutation)
|
||||||
|
} else {
|
||||||
|
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
|
||||||
|
// Invoke the actual operation on the latest mutation in the chain.
|
||||||
|
if err = sqlgraph.BatchCreate(ctx, dcb.driver, spec); err != nil {
|
||||||
|
if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
mutation.id = &nodes[i].ID
|
||||||
|
mutation.done = true
|
||||||
|
return nodes[i], nil
|
||||||
|
})
|
||||||
|
for i := len(builder.hooks) - 1; i >= 0; i-- {
|
||||||
|
mut = builder.hooks[i](mut)
|
||||||
|
}
|
||||||
|
mutators[i] = mut
|
||||||
|
}(i, ctx)
|
||||||
|
}
|
||||||
|
if len(mutators) > 0 {
|
||||||
|
if _, err := mutators[0].Mutate(ctx, dcb.builders[0].mutation); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (dcb *DocumentCreateBulk) SaveX(ctx context.Context) []*Document {
|
||||||
|
v, err := dcb.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query.
|
||||||
|
func (dcb *DocumentCreateBulk) Exec(ctx context.Context) error {
|
||||||
|
_, err := dcb.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (dcb *DocumentCreateBulk) ExecX(ctx context.Context) {
|
||||||
|
if err := dcb.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
115 backend/ent/document_delete.go Normal file
@@ -0,0 +1,115 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"fmt"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// DocumentDelete is the builder for deleting a Document entity.
type DocumentDelete struct {
	config
	hooks    []Hook
	mutation *DocumentMutation
}

// Where appends a list of predicates to the DocumentDelete builder.
func (dd *DocumentDelete) Where(ps ...predicate.Document) *DocumentDelete {
	dd.mutation.Where(ps...)
	return dd
}

// Exec executes the deletion query and returns how many vertices were deleted.
func (dd *DocumentDelete) Exec(ctx context.Context) (int, error) {
	var (
		err      error
		affected int
	)
	if len(dd.hooks) == 0 {
		affected, err = dd.sqlExec(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			dd.mutation = mutation
			affected, err = dd.sqlExec(ctx)
			mutation.done = true
			return affected, err
		})
		for i := len(dd.hooks) - 1; i >= 0; i-- {
			if dd.hooks[i] == nil {
				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = dd.hooks[i](mut)
		}
		if _, err := mut.Mutate(ctx, dd.mutation); err != nil {
			return 0, err
		}
	}
	return affected, err
}

// ExecX is like Exec, but panics if an error occurs.
func (dd *DocumentDelete) ExecX(ctx context.Context) int {
	n, err := dd.Exec(ctx)
	if err != nil {
		panic(err)
	}
	return n
}

func (dd *DocumentDelete) sqlExec(ctx context.Context) (int, error) {
	_spec := &sqlgraph.DeleteSpec{
		Node: &sqlgraph.NodeSpec{
			Table: document.Table,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: document.FieldID,
			},
		},
	}
	if ps := dd.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	affected, err := sqlgraph.DeleteNodes(ctx, dd.driver, _spec)
	if err != nil && sqlgraph.IsConstraintError(err) {
		err = &ConstraintError{msg: err.Error(), wrap: err}
	}
	return affected, err
}

// DocumentDeleteOne is the builder for deleting a single Document entity.
type DocumentDeleteOne struct {
	dd *DocumentDelete
}

// Exec executes the deletion query.
func (ddo *DocumentDeleteOne) Exec(ctx context.Context) error {
	n, err := ddo.dd.Exec(ctx)
	switch {
	case err != nil:
		return err
	case n == 0:
		return &NotFoundError{document.Label}
	default:
		return nil
	}
}

// ExecX is like Exec, but panics if an error occurs.
func (ddo *DocumentDeleteOne) ExecX(ctx context.Context) {
	ddo.dd.ExecX(ctx)
}
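A corresponding deletion sketch, under the same assumptions (client, ctx) plus a hypothetical id value; document.IDEQ is the usual generated predicate for the id field:

	// Hypothetical usage of the generated DocumentDelete builder.
	n, err := client.Document.Delete().
		Where(document.IDEQ(id)).
		Exec(ctx)
	if err == nil {
		fmt.Printf("deleted %d documents\n", n)
	}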
762 backend/ent/document_query.go Normal file
@@ -0,0 +1,762 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"database/sql/driver"
	"fmt"
	"math"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// DocumentQuery is the builder for querying Document entities.
type DocumentQuery struct {
	config
	limit              *int
	offset             *int
	unique             *bool
	order              []OrderFunc
	fields             []string
	predicates         []predicate.Document
	withGroup          *GroupQuery
	withDocumentTokens *DocumentTokenQuery
	withAttachments    *AttachmentQuery
	withFKs            bool
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}

// Where adds a new predicate for the DocumentQuery builder.
func (dq *DocumentQuery) Where(ps ...predicate.Document) *DocumentQuery {
	dq.predicates = append(dq.predicates, ps...)
	return dq
}

// Limit adds a limit step to the query.
func (dq *DocumentQuery) Limit(limit int) *DocumentQuery {
	dq.limit = &limit
	return dq
}

// Offset adds an offset step to the query.
func (dq *DocumentQuery) Offset(offset int) *DocumentQuery {
	dq.offset = &offset
	return dq
}

// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (dq *DocumentQuery) Unique(unique bool) *DocumentQuery {
	dq.unique = &unique
	return dq
}

// Order adds an order step to the query.
func (dq *DocumentQuery) Order(o ...OrderFunc) *DocumentQuery {
	dq.order = append(dq.order, o...)
	return dq
}

// QueryGroup chains the current query on the "group" edge.
func (dq *DocumentQuery) QueryGroup() *GroupQuery {
	query := &GroupQuery{config: dq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := dq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := dq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(document.Table, document.FieldID, selector),
			sqlgraph.To(group.Table, group.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, document.GroupTable, document.GroupColumn),
		)
		fromU = sqlgraph.SetNeighbors(dq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// QueryDocumentTokens chains the current query on the "document_tokens" edge.
func (dq *DocumentQuery) QueryDocumentTokens() *DocumentTokenQuery {
	query := &DocumentTokenQuery{config: dq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := dq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := dq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(document.Table, document.FieldID, selector),
			sqlgraph.To(documenttoken.Table, documenttoken.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, document.DocumentTokensTable, document.DocumentTokensColumn),
		)
		fromU = sqlgraph.SetNeighbors(dq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// QueryAttachments chains the current query on the "attachments" edge.
func (dq *DocumentQuery) QueryAttachments() *AttachmentQuery {
	query := &AttachmentQuery{config: dq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := dq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := dq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(document.Table, document.FieldID, selector),
			sqlgraph.To(attachment.Table, attachment.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, document.AttachmentsTable, document.AttachmentsColumn),
		)
		fromU = sqlgraph.SetNeighbors(dq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// First returns the first Document entity from the query.
// Returns a *NotFoundError when no Document was found.
func (dq *DocumentQuery) First(ctx context.Context) (*Document, error) {
	nodes, err := dq.Limit(1).All(ctx)
	if err != nil {
		return nil, err
	}
	if len(nodes) == 0 {
		return nil, &NotFoundError{document.Label}
	}
	return nodes[0], nil
}

// FirstX is like First, but panics if an error occurs.
func (dq *DocumentQuery) FirstX(ctx context.Context) *Document {
	node, err := dq.First(ctx)
	if err != nil && !IsNotFound(err) {
		panic(err)
	}
	return node
}

// FirstID returns the first Document ID from the query.
// Returns a *NotFoundError when no Document ID was found.
func (dq *DocumentQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
	var ids []uuid.UUID
	if ids, err = dq.Limit(1).IDs(ctx); err != nil {
		return
	}
	if len(ids) == 0 {
		err = &NotFoundError{document.Label}
		return
	}
	return ids[0], nil
}

// FirstIDX is like FirstID, but panics if an error occurs.
func (dq *DocumentQuery) FirstIDX(ctx context.Context) uuid.UUID {
	id, err := dq.FirstID(ctx)
	if err != nil && !IsNotFound(err) {
		panic(err)
	}
	return id
}

// Only returns a single Document entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one Document entity is found.
// Returns a *NotFoundError when no Document entities are found.
func (dq *DocumentQuery) Only(ctx context.Context) (*Document, error) {
	nodes, err := dq.Limit(2).All(ctx)
	if err != nil {
		return nil, err
	}
	switch len(nodes) {
	case 1:
		return nodes[0], nil
	case 0:
		return nil, &NotFoundError{document.Label}
	default:
		return nil, &NotSingularError{document.Label}
	}
}

// OnlyX is like Only, but panics if an error occurs.
func (dq *DocumentQuery) OnlyX(ctx context.Context) *Document {
	node, err := dq.Only(ctx)
	if err != nil {
		panic(err)
	}
	return node
}

// OnlyID is like Only, but returns the only Document ID in the query.
// Returns a *NotSingularError when more than one Document ID is found.
// Returns a *NotFoundError when no entities are found.
func (dq *DocumentQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
	var ids []uuid.UUID
	if ids, err = dq.Limit(2).IDs(ctx); err != nil {
		return
	}
	switch len(ids) {
	case 1:
		id = ids[0]
	case 0:
		err = &NotFoundError{document.Label}
	default:
		err = &NotSingularError{document.Label}
	}
	return
}

// OnlyIDX is like OnlyID, but panics if an error occurs.
func (dq *DocumentQuery) OnlyIDX(ctx context.Context) uuid.UUID {
	id, err := dq.OnlyID(ctx)
	if err != nil {
		panic(err)
	}
	return id
}

// All executes the query and returns a list of Documents.
func (dq *DocumentQuery) All(ctx context.Context) ([]*Document, error) {
	if err := dq.prepareQuery(ctx); err != nil {
		return nil, err
	}
	return dq.sqlAll(ctx)
}

// AllX is like All, but panics if an error occurs.
func (dq *DocumentQuery) AllX(ctx context.Context) []*Document {
	nodes, err := dq.All(ctx)
	if err != nil {
		panic(err)
	}
	return nodes
}

// IDs executes the query and returns a list of Document IDs.
func (dq *DocumentQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
	var ids []uuid.UUID
	if err := dq.Select(document.FieldID).Scan(ctx, &ids); err != nil {
		return nil, err
	}
	return ids, nil
}

// IDsX is like IDs, but panics if an error occurs.
func (dq *DocumentQuery) IDsX(ctx context.Context) []uuid.UUID {
	ids, err := dq.IDs(ctx)
	if err != nil {
		panic(err)
	}
	return ids
}

// Count returns the count of the given query.
func (dq *DocumentQuery) Count(ctx context.Context) (int, error) {
	if err := dq.prepareQuery(ctx); err != nil {
		return 0, err
	}
	return dq.sqlCount(ctx)
}

// CountX is like Count, but panics if an error occurs.
func (dq *DocumentQuery) CountX(ctx context.Context) int {
	count, err := dq.Count(ctx)
	if err != nil {
		panic(err)
	}
	return count
}

// Exist returns true if the query has elements in the graph.
func (dq *DocumentQuery) Exist(ctx context.Context) (bool, error) {
	if err := dq.prepareQuery(ctx); err != nil {
		return false, err
	}
	return dq.sqlExist(ctx)
}

// ExistX is like Exist, but panics if an error occurs.
func (dq *DocumentQuery) ExistX(ctx context.Context) bool {
	exist, err := dq.Exist(ctx)
	if err != nil {
		panic(err)
	}
	return exist
}

// Clone returns a duplicate of the DocumentQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (dq *DocumentQuery) Clone() *DocumentQuery {
	if dq == nil {
		return nil
	}
	return &DocumentQuery{
		config:             dq.config,
		limit:              dq.limit,
		offset:             dq.offset,
		order:              append([]OrderFunc{}, dq.order...),
		predicates:         append([]predicate.Document{}, dq.predicates...),
		withGroup:          dq.withGroup.Clone(),
		withDocumentTokens: dq.withDocumentTokens.Clone(),
		withAttachments:    dq.withAttachments.Clone(),
		// clone intermediate query.
		sql:    dq.sql.Clone(),
		path:   dq.path,
		unique: dq.unique,
	}
}

// WithGroup tells the query-builder to eager-load the nodes that are connected to
// the "group" edge. The optional arguments are used to configure the query builder of the edge.
func (dq *DocumentQuery) WithGroup(opts ...func(*GroupQuery)) *DocumentQuery {
	query := &GroupQuery{config: dq.config}
	for _, opt := range opts {
		opt(query)
	}
	dq.withGroup = query
	return dq
}

// WithDocumentTokens tells the query-builder to eager-load the nodes that are connected to
// the "document_tokens" edge. The optional arguments are used to configure the query builder of the edge.
func (dq *DocumentQuery) WithDocumentTokens(opts ...func(*DocumentTokenQuery)) *DocumentQuery {
	query := &DocumentTokenQuery{config: dq.config}
	for _, opt := range opts {
		opt(query)
	}
	dq.withDocumentTokens = query
	return dq
}

// WithAttachments tells the query-builder to eager-load the nodes that are connected to
// the "attachments" edge. The optional arguments are used to configure the query builder of the edge.
func (dq *DocumentQuery) WithAttachments(opts ...func(*AttachmentQuery)) *DocumentQuery {
	query := &AttachmentQuery{config: dq.config}
	for _, opt := range opts {
		opt(query)
	}
	dq.withAttachments = query
	return dq
}

// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
//	var v []struct {
//		CreatedAt time.Time `json:"created_at,omitempty"`
//		Count int `json:"count,omitempty"`
//	}
//
//	client.Document.Query().
//		GroupBy(document.FieldCreatedAt).
//		Aggregate(ent.Count()).
//		Scan(ctx, &v)
func (dq *DocumentQuery) GroupBy(field string, fields ...string) *DocumentGroupBy {
	grbuild := &DocumentGroupBy{config: dq.config}
	grbuild.fields = append([]string{field}, fields...)
	grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) {
		if err := dq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		return dq.sqlQuery(ctx), nil
	}
	grbuild.label = document.Label
	grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan
	return grbuild
}

// Select allows the selection of one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
//	var v []struct {
//		CreatedAt time.Time `json:"created_at,omitempty"`
//	}
//
//	client.Document.Query().
//		Select(document.FieldCreatedAt).
//		Scan(ctx, &v)
func (dq *DocumentQuery) Select(fields ...string) *DocumentSelect {
	dq.fields = append(dq.fields, fields...)
	selbuild := &DocumentSelect{DocumentQuery: dq}
	selbuild.label = document.Label
	selbuild.flds, selbuild.scan = &dq.fields, selbuild.Scan
	return selbuild
}

func (dq *DocumentQuery) prepareQuery(ctx context.Context) error {
	for _, f := range dq.fields {
		if !document.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
		}
	}
	if dq.path != nil {
		prev, err := dq.path(ctx)
		if err != nil {
			return err
		}
		dq.sql = prev
	}
	return nil
}

func (dq *DocumentQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Document, error) {
	var (
		nodes       = []*Document{}
		withFKs     = dq.withFKs
		_spec       = dq.querySpec()
		loadedTypes = [3]bool{
			dq.withGroup != nil,
			dq.withDocumentTokens != nil,
			dq.withAttachments != nil,
		}
	)
	if dq.withGroup != nil {
		withFKs = true
	}
	if withFKs {
		_spec.Node.Columns = append(_spec.Node.Columns, document.ForeignKeys...)
	}
	_spec.ScanValues = func(columns []string) ([]interface{}, error) {
		return (*Document).scanValues(nil, columns)
	}
	_spec.Assign = func(columns []string, values []interface{}) error {
		node := &Document{config: dq.config}
		nodes = append(nodes, node)
		node.Edges.loadedTypes = loadedTypes
		return node.assignValues(columns, values)
	}
	for i := range hooks {
		hooks[i](ctx, _spec)
	}
	if err := sqlgraph.QueryNodes(ctx, dq.driver, _spec); err != nil {
		return nil, err
	}
	if len(nodes) == 0 {
		return nodes, nil
	}
	if query := dq.withGroup; query != nil {
		if err := dq.loadGroup(ctx, query, nodes, nil,
			func(n *Document, e *Group) { n.Edges.Group = e }); err != nil {
			return nil, err
		}
	}
	if query := dq.withDocumentTokens; query != nil {
		if err := dq.loadDocumentTokens(ctx, query, nodes,
			func(n *Document) { n.Edges.DocumentTokens = []*DocumentToken{} },
			func(n *Document, e *DocumentToken) { n.Edges.DocumentTokens = append(n.Edges.DocumentTokens, e) }); err != nil {
			return nil, err
		}
	}
	if query := dq.withAttachments; query != nil {
		if err := dq.loadAttachments(ctx, query, nodes,
			func(n *Document) { n.Edges.Attachments = []*Attachment{} },
			func(n *Document, e *Attachment) { n.Edges.Attachments = append(n.Edges.Attachments, e) }); err != nil {
			return nil, err
		}
	}
	return nodes, nil
}

func (dq *DocumentQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []*Document, init func(*Document), assign func(*Document, *Group)) error {
	ids := make([]uuid.UUID, 0, len(nodes))
	nodeids := make(map[uuid.UUID][]*Document)
	for i := range nodes {
		if nodes[i].group_documents == nil {
			continue
		}
		fk := *nodes[i].group_documents
		if _, ok := nodeids[fk]; !ok {
			ids = append(ids, fk)
		}
		nodeids[fk] = append(nodeids[fk], nodes[i])
	}
	query.Where(group.IDIn(ids...))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		nodes, ok := nodeids[n.ID]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "group_documents" returned %v`, n.ID)
		}
		for i := range nodes {
			assign(nodes[i], n)
		}
	}
	return nil
}
func (dq *DocumentQuery) loadDocumentTokens(ctx context.Context, query *DocumentTokenQuery, nodes []*Document, init func(*Document), assign func(*Document, *DocumentToken)) error {
	fks := make([]driver.Value, 0, len(nodes))
	nodeids := make(map[uuid.UUID]*Document)
	for i := range nodes {
		fks = append(fks, nodes[i].ID)
		nodeids[nodes[i].ID] = nodes[i]
		if init != nil {
			init(nodes[i])
		}
	}
	query.withFKs = true
	query.Where(predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.InValues(document.DocumentTokensColumn, fks...))
	}))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		fk := n.document_document_tokens
		if fk == nil {
			return fmt.Errorf(`foreign-key "document_document_tokens" is nil for node %v`, n.ID)
		}
		node, ok := nodeids[*fk]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "document_document_tokens" returned %v for node %v`, *fk, n.ID)
		}
		assign(node, n)
	}
	return nil
}
func (dq *DocumentQuery) loadAttachments(ctx context.Context, query *AttachmentQuery, nodes []*Document, init func(*Document), assign func(*Document, *Attachment)) error {
	fks := make([]driver.Value, 0, len(nodes))
	nodeids := make(map[uuid.UUID]*Document)
	for i := range nodes {
		fks = append(fks, nodes[i].ID)
		nodeids[nodes[i].ID] = nodes[i]
		if init != nil {
			init(nodes[i])
		}
	}
	query.withFKs = true
	query.Where(predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.InValues(document.AttachmentsColumn, fks...))
	}))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		fk := n.document_attachments
		if fk == nil {
			return fmt.Errorf(`foreign-key "document_attachments" is nil for node %v`, n.ID)
		}
		node, ok := nodeids[*fk]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "document_attachments" returned %v for node %v`, *fk, n.ID)
		}
		assign(node, n)
	}
	return nil
}

func (dq *DocumentQuery) sqlCount(ctx context.Context) (int, error) {
	_spec := dq.querySpec()
	_spec.Node.Columns = dq.fields
	if len(dq.fields) > 0 {
		_spec.Unique = dq.unique != nil && *dq.unique
	}
	return sqlgraph.CountNodes(ctx, dq.driver, _spec)
}

func (dq *DocumentQuery) sqlExist(ctx context.Context) (bool, error) {
	n, err := dq.sqlCount(ctx)
	if err != nil {
		return false, fmt.Errorf("ent: check existence: %w", err)
	}
	return n > 0, nil
}

func (dq *DocumentQuery) querySpec() *sqlgraph.QuerySpec {
	_spec := &sqlgraph.QuerySpec{
		Node: &sqlgraph.NodeSpec{
			Table:   document.Table,
			Columns: document.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: document.FieldID,
			},
		},
		From:   dq.sql,
		Unique: true,
	}
	if unique := dq.unique; unique != nil {
		_spec.Unique = *unique
	}
	if fields := dq.fields; len(fields) > 0 {
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, document.FieldID)
		for i := range fields {
			if fields[i] != document.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
			}
		}
	}
	if ps := dq.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if limit := dq.limit; limit != nil {
		_spec.Limit = *limit
	}
	if offset := dq.offset; offset != nil {
		_spec.Offset = *offset
	}
	if ps := dq.order; len(ps) > 0 {
		_spec.Order = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	return _spec
}

func (dq *DocumentQuery) sqlQuery(ctx context.Context) *sql.Selector {
	builder := sql.Dialect(dq.driver.Dialect())
	t1 := builder.Table(document.Table)
	columns := dq.fields
	if len(columns) == 0 {
		columns = document.Columns
	}
	selector := builder.Select(t1.Columns(columns...)...).From(t1)
	if dq.sql != nil {
		selector = dq.sql
		selector.Select(selector.Columns(columns...)...)
	}
	if dq.unique != nil && *dq.unique {
		selector.Distinct()
	}
	for _, p := range dq.predicates {
		p(selector)
	}
	for _, p := range dq.order {
		p(selector)
	}
	if offset := dq.offset; offset != nil {
		// limit is mandatory for the offset clause. We start
		// with a default value, and override it below if needed.
		selector.Offset(*offset).Limit(math.MaxInt32)
	}
	if limit := dq.limit; limit != nil {
		selector.Limit(*limit)
	}
	return selector
}

// DocumentGroupBy is the group-by builder for Document entities.
type DocumentGroupBy struct {
	config
	selector
	fields []string
	fns    []AggregateFunc
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}

// Aggregate adds the given aggregation functions to the group-by query.
func (dgb *DocumentGroupBy) Aggregate(fns ...AggregateFunc) *DocumentGroupBy {
	dgb.fns = append(dgb.fns, fns...)
	return dgb
}

// Scan applies the group-by query and scans the result into the given value.
func (dgb *DocumentGroupBy) Scan(ctx context.Context, v interface{}) error {
	query, err := dgb.path(ctx)
	if err != nil {
		return err
	}
	dgb.sql = query
	return dgb.sqlScan(ctx, v)
}

func (dgb *DocumentGroupBy) sqlScan(ctx context.Context, v interface{}) error {
	for _, f := range dgb.fields {
		if !document.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)}
		}
	}
	selector := dgb.sqlQuery()
	if err := selector.Err(); err != nil {
		return err
	}
	rows := &sql.Rows{}
	query, args := selector.Query()
	if err := dgb.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}

func (dgb *DocumentGroupBy) sqlQuery() *sql.Selector {
	selector := dgb.sql.Select()
	aggregation := make([]string, 0, len(dgb.fns))
	for _, fn := range dgb.fns {
		aggregation = append(aggregation, fn(selector))
	}
	// If no columns were selected in a custom aggregation function, the default
	// selection is the fields used for "group-by", and the aggregation functions.
	if len(selector.SelectedColumns()) == 0 {
		columns := make([]string, 0, len(dgb.fields)+len(dgb.fns))
		for _, f := range dgb.fields {
			columns = append(columns, selector.C(f))
		}
		columns = append(columns, aggregation...)
		selector.Select(columns...)
	}
	return selector.GroupBy(selector.Columns(dgb.fields...)...)
}

// DocumentSelect is the builder for selecting fields of Document entities.
type DocumentSelect struct {
	*DocumentQuery
	selector
	// intermediate query (i.e. traversal path).
	sql *sql.Selector
}

// Scan applies the selector query and scans the result into the given value.
func (ds *DocumentSelect) Scan(ctx context.Context, v interface{}) error {
	if err := ds.prepareQuery(ctx); err != nil {
		return err
	}
	ds.sql = ds.DocumentQuery.sqlQuery(ctx)
	return ds.sqlScan(ctx, v)
}

func (ds *DocumentSelect) sqlScan(ctx context.Context, v interface{}) error {
	rows := &sql.Rows{}
	query, args := ds.sql.Query()
	if err := ds.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}
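And a query sketch under the same assumptions, showing the eager-loading options (WithGroup/WithAttachments) defined above; document.TitleContains is the usual generated string predicate and is assumed here:

	// Hypothetical usage of the generated DocumentQuery builder.
	docs, err := client.Document.Query().
		Where(document.TitleContains("receipt")).
		WithGroup().       // populates Edges.Group via loadGroup
		WithAttachments(). // populates Edges.Attachments via loadAttachments
		All(ctx)
	if err != nil {
		return err
	}
	_ = docs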
858 backend/ent/document_update.go Normal file
@@ -0,0 +1,858 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"errors"
	"fmt"
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// DocumentUpdate is the builder for updating Document entities.
type DocumentUpdate struct {
	config
	hooks    []Hook
	mutation *DocumentMutation
}

// Where appends a list of predicates to the DocumentUpdate builder.
func (du *DocumentUpdate) Where(ps ...predicate.Document) *DocumentUpdate {
	du.mutation.Where(ps...)
	return du
}

// SetUpdatedAt sets the "updated_at" field.
func (du *DocumentUpdate) SetUpdatedAt(t time.Time) *DocumentUpdate {
	du.mutation.SetUpdatedAt(t)
	return du
}

// SetTitle sets the "title" field.
func (du *DocumentUpdate) SetTitle(s string) *DocumentUpdate {
	du.mutation.SetTitle(s)
	return du
}

// SetPath sets the "path" field.
func (du *DocumentUpdate) SetPath(s string) *DocumentUpdate {
	du.mutation.SetPath(s)
	return du
}

// SetGroupID sets the "group" edge to the Group entity by ID.
func (du *DocumentUpdate) SetGroupID(id uuid.UUID) *DocumentUpdate {
	du.mutation.SetGroupID(id)
	return du
}

// SetGroup sets the "group" edge to the Group entity.
func (du *DocumentUpdate) SetGroup(g *Group) *DocumentUpdate {
	return du.SetGroupID(g.ID)
}

// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by IDs.
func (du *DocumentUpdate) AddDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdate {
	du.mutation.AddDocumentTokenIDs(ids...)
	return du
}

// AddDocumentTokens adds the "document_tokens" edges to the DocumentToken entity.
func (du *DocumentUpdate) AddDocumentTokens(d ...*DocumentToken) *DocumentUpdate {
	ids := make([]uuid.UUID, len(d))
	for i := range d {
		ids[i] = d[i].ID
	}
	return du.AddDocumentTokenIDs(ids...)
}

// AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
func (du *DocumentUpdate) AddAttachmentIDs(ids ...uuid.UUID) *DocumentUpdate {
	du.mutation.AddAttachmentIDs(ids...)
	return du
}

// AddAttachments adds the "attachments" edges to the Attachment entity.
func (du *DocumentUpdate) AddAttachments(a ...*Attachment) *DocumentUpdate {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return du.AddAttachmentIDs(ids...)
}

// Mutation returns the DocumentMutation object of the builder.
func (du *DocumentUpdate) Mutation() *DocumentMutation {
	return du.mutation
}

// ClearGroup clears the "group" edge to the Group entity.
func (du *DocumentUpdate) ClearGroup() *DocumentUpdate {
	du.mutation.ClearGroup()
	return du
}

// ClearDocumentTokens clears all "document_tokens" edges to the DocumentToken entity.
func (du *DocumentUpdate) ClearDocumentTokens() *DocumentUpdate {
	du.mutation.ClearDocumentTokens()
	return du
}

// RemoveDocumentTokenIDs removes the "document_tokens" edge to DocumentToken entities by IDs.
func (du *DocumentUpdate) RemoveDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdate {
	du.mutation.RemoveDocumentTokenIDs(ids...)
	return du
}

// RemoveDocumentTokens removes "document_tokens" edges to DocumentToken entities.
func (du *DocumentUpdate) RemoveDocumentTokens(d ...*DocumentToken) *DocumentUpdate {
	ids := make([]uuid.UUID, len(d))
	for i := range d {
		ids[i] = d[i].ID
	}
	return du.RemoveDocumentTokenIDs(ids...)
}

// ClearAttachments clears all "attachments" edges to the Attachment entity.
func (du *DocumentUpdate) ClearAttachments() *DocumentUpdate {
	du.mutation.ClearAttachments()
	return du
}

// RemoveAttachmentIDs removes the "attachments" edge to Attachment entities by IDs.
func (du *DocumentUpdate) RemoveAttachmentIDs(ids ...uuid.UUID) *DocumentUpdate {
	du.mutation.RemoveAttachmentIDs(ids...)
	return du
}

// RemoveAttachments removes "attachments" edges to Attachment entities.
func (du *DocumentUpdate) RemoveAttachments(a ...*Attachment) *DocumentUpdate {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return du.RemoveAttachmentIDs(ids...)
}

// Save executes the query and returns the number of nodes affected by the update operation.
func (du *DocumentUpdate) Save(ctx context.Context) (int, error) {
	var (
		err      error
		affected int
	)
	du.defaults()
	if len(du.hooks) == 0 {
		if err = du.check(); err != nil {
			return 0, err
		}
		affected, err = du.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = du.check(); err != nil {
				return 0, err
			}
			du.mutation = mutation
			affected, err = du.sqlSave(ctx)
			mutation.done = true
			return affected, err
		})
		for i := len(du.hooks) - 1; i >= 0; i-- {
			if du.hooks[i] == nil {
				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = du.hooks[i](mut)
		}
		if _, err := mut.Mutate(ctx, du.mutation); err != nil {
			return 0, err
		}
	}
	return affected, err
}

// SaveX is like Save, but panics if an error occurs.
func (du *DocumentUpdate) SaveX(ctx context.Context) int {
	affected, err := du.Save(ctx)
	if err != nil {
		panic(err)
	}
	return affected
}

// Exec executes the query.
func (du *DocumentUpdate) Exec(ctx context.Context) error {
	_, err := du.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (du *DocumentUpdate) ExecX(ctx context.Context) {
	if err := du.Exec(ctx); err != nil {
		panic(err)
	}
}

// defaults sets the default values of the builder before save.
func (du *DocumentUpdate) defaults() {
	if _, ok := du.mutation.UpdatedAt(); !ok {
		v := document.UpdateDefaultUpdatedAt()
		du.mutation.SetUpdatedAt(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (du *DocumentUpdate) check() error {
	if v, ok := du.mutation.Title(); ok {
		if err := document.TitleValidator(v); err != nil {
			return &ValidationError{Name: "title", err: fmt.Errorf(`ent: validator failed for field "Document.title": %w`, err)}
		}
	}
	if v, ok := du.mutation.Path(); ok {
		if err := document.PathValidator(v); err != nil {
			return &ValidationError{Name: "path", err: fmt.Errorf(`ent: validator failed for field "Document.path": %w`, err)}
		}
	}
	if _, ok := du.mutation.GroupID(); du.mutation.GroupCleared() && !ok {
		return errors.New(`ent: clearing a required unique edge "Document.group"`)
	}
	return nil
}

func (du *DocumentUpdate) sqlSave(ctx context.Context) (n int, err error) {
	_spec := &sqlgraph.UpdateSpec{
		Node: &sqlgraph.NodeSpec{
			Table:   document.Table,
			Columns: document.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: document.FieldID,
			},
		},
	}
	if ps := du.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if value, ok := du.mutation.UpdatedAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: document.FieldUpdatedAt,
		})
	}
	if value, ok := du.mutation.Title(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeString,
			Value:  value,
			Column: document.FieldTitle,
		})
	}
	if value, ok := du.mutation.Path(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeString,
			Value:  value,
			Column: document.FieldPath,
		})
	}
	if du.mutation.GroupCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   document.GroupTable,
			Columns: []string{document.GroupColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: group.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := du.mutation.GroupIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   document.GroupTable,
			Columns: []string{document.GroupColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: group.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if du.mutation.DocumentTokensCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   document.DocumentTokensTable,
			Columns: []string{document.DocumentTokensColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: documenttoken.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := du.mutation.RemovedDocumentTokensIDs(); len(nodes) > 0 && !du.mutation.DocumentTokensCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   document.DocumentTokensTable,
			Columns: []string{document.DocumentTokensColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: documenttoken.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := du.mutation.DocumentTokensIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   document.DocumentTokensTable,
			Columns: []string{document.DocumentTokensColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: documenttoken.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if du.mutation.AttachmentsCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   document.AttachmentsTable,
			Columns: []string{document.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := du.mutation.RemovedAttachmentsIDs(); len(nodes) > 0 && !du.mutation.AttachmentsCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   document.AttachmentsTable,
			Columns: []string{document.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := du.mutation.AttachmentsIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   document.AttachmentsTable,
			Columns: []string{document.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: attachment.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Add = append(_spec.Edges.Add, edge)
|
||||||
|
}
|
||||||
|
if n, err = sqlgraph.UpdateNodes(ctx, du.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{document.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return n, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DocumentUpdateOne is the builder for updating a single Document entity.
|
||||||
|
type DocumentUpdateOne struct {
|
||||||
|
config
|
||||||
|
fields []string
|
||||||
|
hooks []Hook
|
||||||
|
mutation *DocumentMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUpdatedAt sets the "updated_at" field.
|
||||||
|
func (duo *DocumentUpdateOne) SetUpdatedAt(t time.Time) *DocumentUpdateOne {
|
||||||
|
duo.mutation.SetUpdatedAt(t)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTitle sets the "title" field.
|
||||||
|
func (duo *DocumentUpdateOne) SetTitle(s string) *DocumentUpdateOne {
|
||||||
|
duo.mutation.SetTitle(s)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetPath sets the "path" field.
|
||||||
|
func (duo *DocumentUpdateOne) SetPath(s string) *DocumentUpdateOne {
|
||||||
|
duo.mutation.SetPath(s)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetGroupID sets the "group" edge to the Group entity by ID.
|
||||||
|
func (duo *DocumentUpdateOne) SetGroupID(id uuid.UUID) *DocumentUpdateOne {
|
||||||
|
duo.mutation.SetGroupID(id)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetGroup sets the "group" edge to the Group entity.
|
||||||
|
func (duo *DocumentUpdateOne) SetGroup(g *Group) *DocumentUpdateOne {
|
||||||
|
return duo.SetGroupID(g.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by IDs.
|
||||||
|
func (duo *DocumentUpdateOne) AddDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdateOne {
|
||||||
|
duo.mutation.AddDocumentTokenIDs(ids...)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddDocumentTokens adds the "document_tokens" edges to the DocumentToken entity.
|
||||||
|
func (duo *DocumentUpdateOne) AddDocumentTokens(d ...*DocumentToken) *DocumentUpdateOne {
|
||||||
|
ids := make([]uuid.UUID, len(d))
|
||||||
|
for i := range d {
|
||||||
|
ids[i] = d[i].ID
|
||||||
|
}
|
||||||
|
return duo.AddDocumentTokenIDs(ids...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
|
||||||
|
func (duo *DocumentUpdateOne) AddAttachmentIDs(ids ...uuid.UUID) *DocumentUpdateOne {
|
||||||
|
duo.mutation.AddAttachmentIDs(ids...)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAttachments adds the "attachments" edges to the Attachment entity.
|
||||||
|
func (duo *DocumentUpdateOne) AddAttachments(a ...*Attachment) *DocumentUpdateOne {
|
||||||
|
ids := make([]uuid.UUID, len(a))
|
||||||
|
for i := range a {
|
||||||
|
ids[i] = a[i].ID
|
||||||
|
}
|
||||||
|
return duo.AddAttachmentIDs(ids...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mutation returns the DocumentMutation object of the builder.
|
||||||
|
func (duo *DocumentUpdateOne) Mutation() *DocumentMutation {
|
||||||
|
return duo.mutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearGroup clears the "group" edge to the Group entity.
|
||||||
|
func (duo *DocumentUpdateOne) ClearGroup() *DocumentUpdateOne {
|
||||||
|
duo.mutation.ClearGroup()
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearDocumentTokens clears all "document_tokens" edges to the DocumentToken entity.
|
||||||
|
func (duo *DocumentUpdateOne) ClearDocumentTokens() *DocumentUpdateOne {
|
||||||
|
duo.mutation.ClearDocumentTokens()
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveDocumentTokenIDs removes the "document_tokens" edge to DocumentToken entities by IDs.
|
||||||
|
func (duo *DocumentUpdateOne) RemoveDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdateOne {
|
||||||
|
duo.mutation.RemoveDocumentTokenIDs(ids...)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveDocumentTokens removes "document_tokens" edges to DocumentToken entities.
|
||||||
|
func (duo *DocumentUpdateOne) RemoveDocumentTokens(d ...*DocumentToken) *DocumentUpdateOne {
|
||||||
|
ids := make([]uuid.UUID, len(d))
|
||||||
|
for i := range d {
|
||||||
|
ids[i] = d[i].ID
|
||||||
|
}
|
||||||
|
return duo.RemoveDocumentTokenIDs(ids...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearAttachments clears all "attachments" edges to the Attachment entity.
|
||||||
|
func (duo *DocumentUpdateOne) ClearAttachments() *DocumentUpdateOne {
|
||||||
|
duo.mutation.ClearAttachments()
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveAttachmentIDs removes the "attachments" edge to Attachment entities by IDs.
|
||||||
|
func (duo *DocumentUpdateOne) RemoveAttachmentIDs(ids ...uuid.UUID) *DocumentUpdateOne {
|
||||||
|
duo.mutation.RemoveAttachmentIDs(ids...)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveAttachments removes "attachments" edges to Attachment entities.
|
||||||
|
func (duo *DocumentUpdateOne) RemoveAttachments(a ...*Attachment) *DocumentUpdateOne {
|
||||||
|
ids := make([]uuid.UUID, len(a))
|
||||||
|
for i := range a {
|
||||||
|
ids[i] = a[i].ID
|
||||||
|
}
|
||||||
|
return duo.RemoveAttachmentIDs(ids...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||||
|
// The default is selecting all fields defined in the entity schema.
|
||||||
|
func (duo *DocumentUpdateOne) Select(field string, fields ...string) *DocumentUpdateOne {
|
||||||
|
duo.fields = append([]string{field}, fields...)
|
||||||
|
return duo
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the updated Document entity.
|
||||||
|
func (duo *DocumentUpdateOne) Save(ctx context.Context) (*Document, error) {
|
||||||
|
var (
|
||||||
|
err error
|
||||||
|
node *Document
|
||||||
|
)
|
||||||
|
duo.defaults()
|
||||||
|
if len(duo.hooks) == 0 {
|
||||||
|
if err = duo.check(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
node, err = duo.sqlSave(ctx)
|
||||||
|
} else {
|
||||||
|
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
|
||||||
|
mutation, ok := m.(*DocumentMutation)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T", m)
|
||||||
|
}
|
||||||
|
if err = duo.check(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
duo.mutation = mutation
|
||||||
|
node, err = duo.sqlSave(ctx)
|
||||||
|
mutation.done = true
|
||||||
|
return node, err
|
||||||
|
})
|
||||||
|
for i := len(duo.hooks) - 1; i >= 0; i-- {
|
||||||
|
if duo.hooks[i] == nil {
|
||||||
|
return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
|
||||||
|
}
|
||||||
|
mut = duo.hooks[i](mut)
|
||||||
|
}
|
||||||
|
v, err := mut.Mutate(ctx, duo.mutation)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
nv, ok := v.(*Document)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("unexpected node type %T returned from DocumentMutation", v)
|
||||||
|
}
|
||||||
|
node = nv
|
||||||
|
}
|
||||||
|
return node, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (duo *DocumentUpdateOne) SaveX(ctx context.Context) *Document {
|
||||||
|
node, err := duo.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query on the entity.
|
||||||
|
func (duo *DocumentUpdateOne) Exec(ctx context.Context) error {
|
||||||
|
_, err := duo.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (duo *DocumentUpdateOne) ExecX(ctx context.Context) {
|
||||||
|
if err := duo.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// defaults sets the default values of the builder before save.
|
||||||
|
func (duo *DocumentUpdateOne) defaults() {
|
||||||
|
if _, ok := duo.mutation.UpdatedAt(); !ok {
|
||||||
|
v := document.UpdateDefaultUpdatedAt()
|
||||||
|
duo.mutation.SetUpdatedAt(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check runs all checks and user-defined validators on the builder.
|
||||||
|
func (duo *DocumentUpdateOne) check() error {
|
||||||
|
if v, ok := duo.mutation.Title(); ok {
|
||||||
|
if err := document.TitleValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "title", err: fmt.Errorf(`ent: validator failed for field "Document.title": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if v, ok := duo.mutation.Path(); ok {
|
||||||
|
if err := document.PathValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "path", err: fmt.Errorf(`ent: validator failed for field "Document.path": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _, ok := duo.mutation.GroupID(); duo.mutation.GroupCleared() && !ok {
|
||||||
|
return errors.New(`ent: clearing a required unique edge "Document.group"`)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (duo *DocumentUpdateOne) sqlSave(ctx context.Context) (_node *Document, err error) {
|
||||||
|
_spec := &sqlgraph.UpdateSpec{
|
||||||
|
Node: &sqlgraph.NodeSpec{
|
||||||
|
Table: document.Table,
|
||||||
|
Columns: document.Columns,
|
||||||
|
ID: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: document.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
id, ok := duo.mutation.ID()
|
||||||
|
if !ok {
|
||||||
|
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Document.id" for update`)}
|
||||||
|
}
|
||||||
|
_spec.Node.ID.Value = id
|
||||||
|
if fields := duo.fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, document.FieldID)
|
||||||
|
for _, f := range fields {
|
||||||
|
if !document.ValidColumn(f) {
|
||||||
|
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
if f != document.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := duo.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if value, ok := duo.mutation.UpdatedAt(); ok {
|
||||||
|
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldUpdatedAt,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if value, ok := duo.mutation.Title(); ok {
|
||||||
|
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeString,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldTitle,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if value, ok := duo.mutation.Path(); ok {
|
||||||
|
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeString,
|
||||||
|
Value: value,
|
||||||
|
Column: document.FieldPath,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if duo.mutation.GroupCleared() {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.M2O,
|
||||||
|
Inverse: true,
|
||||||
|
Table: document.GroupTable,
|
||||||
|
Columns: []string{document.GroupColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: group.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||||
|
}
|
||||||
|
if nodes := duo.mutation.GroupIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.M2O,
|
||||||
|
Inverse: true,
|
||||||
|
Table: document.GroupTable,
|
||||||
|
Columns: []string{document.GroupColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: group.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Add = append(_spec.Edges.Add, edge)
|
||||||
|
}
|
||||||
|
if duo.mutation.DocumentTokensCleared() {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.DocumentTokensTable,
|
||||||
|
Columns: []string{document.DocumentTokensColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: documenttoken.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||||
|
}
|
||||||
|
if nodes := duo.mutation.RemovedDocumentTokensIDs(); len(nodes) > 0 && !duo.mutation.DocumentTokensCleared() {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.DocumentTokensTable,
|
||||||
|
Columns: []string{document.DocumentTokensColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: documenttoken.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||||
|
}
|
||||||
|
if nodes := duo.mutation.DocumentTokensIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.DocumentTokensTable,
|
||||||
|
Columns: []string{document.DocumentTokensColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: documenttoken.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Add = append(_spec.Edges.Add, edge)
|
||||||
|
}
|
||||||
|
if duo.mutation.AttachmentsCleared() {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.AttachmentsTable,
|
||||||
|
Columns: []string{document.AttachmentsColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: attachment.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||||
|
}
|
||||||
|
if nodes := duo.mutation.RemovedAttachmentsIDs(); len(nodes) > 0 && !duo.mutation.AttachmentsCleared() {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.AttachmentsTable,
|
||||||
|
Columns: []string{document.AttachmentsColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: attachment.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||||
|
}
|
||||||
|
if nodes := duo.mutation.AttachmentsIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.O2M,
|
||||||
|
Inverse: false,
|
||||||
|
Table: document.AttachmentsTable,
|
||||||
|
Columns: []string{document.AttachmentsColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: attachment.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Add = append(_spec.Edges.Add, edge)
|
||||||
|
}
|
||||||
|
_node = &Document{config: duo.config}
|
||||||
|
_spec.Assign = _node.assignValues
|
||||||
|
_spec.ScanValues = _node.scanValues
|
||||||
|
if err = sqlgraph.UpdateNode(ctx, duo.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{document.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return _node, nil
|
||||||
|
}
190
backend/ent/documenttoken.go
Normal file
@@ -0,0 +1,190 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"fmt"
	"strings"
	"time"

	"entgo.io/ent/dialect/sql"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
)

// DocumentToken is the model entity for the DocumentToken schema.
type DocumentToken struct {
	config `json:"-"`
	// ID of the ent.
	ID uuid.UUID `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// Token holds the value of the "token" field.
	Token []byte `json:"token,omitempty"`
	// Uses holds the value of the "uses" field.
	Uses int `json:"uses,omitempty"`
	// ExpiresAt holds the value of the "expires_at" field.
	ExpiresAt time.Time `json:"expires_at,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the DocumentTokenQuery when eager-loading is set.
	Edges                    DocumentTokenEdges `json:"edges"`
	document_document_tokens *uuid.UUID
}

// DocumentTokenEdges holds the relations/edges for other nodes in the graph.
type DocumentTokenEdges struct {
	// Document holds the value of the document edge.
	Document *Document `json:"document,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	loadedTypes [1]bool
}

// DocumentOrErr returns the Document value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e DocumentTokenEdges) DocumentOrErr() (*Document, error) {
	if e.loadedTypes[0] {
		if e.Document == nil {
			// Edge was loaded but was not found.
			return nil, &NotFoundError{label: document.Label}
		}
		return e.Document, nil
	}
	return nil, &NotLoadedError{edge: "document"}
}

// scanValues returns the types for scanning values from sql.Rows.
func (*DocumentToken) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case documenttoken.FieldToken:
			values[i] = new([]byte)
		case documenttoken.FieldUses:
			values[i] = new(sql.NullInt64)
		case documenttoken.FieldCreatedAt, documenttoken.FieldUpdatedAt, documenttoken.FieldExpiresAt:
			values[i] = new(sql.NullTime)
		case documenttoken.FieldID:
			values[i] = new(uuid.UUID)
		case documenttoken.ForeignKeys[0]: // document_document_tokens
			values[i] = &sql.NullScanner{S: new(uuid.UUID)}
		default:
			return nil, fmt.Errorf("unexpected column %q for type DocumentToken", columns[i])
		}
	}
	return values, nil
}

// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the DocumentToken fields.
func (dt *DocumentToken) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case documenttoken.FieldID:
			if value, ok := values[i].(*uuid.UUID); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value != nil {
				dt.ID = *value
			}
		case documenttoken.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				dt.CreatedAt = value.Time
			}
		case documenttoken.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				dt.UpdatedAt = value.Time
			}
		case documenttoken.FieldToken:
			if value, ok := values[i].(*[]byte); !ok {
				return fmt.Errorf("unexpected type %T for field token", values[i])
			} else if value != nil {
				dt.Token = *value
			}
		case documenttoken.FieldUses:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field uses", values[i])
			} else if value.Valid {
				dt.Uses = int(value.Int64)
			}
		case documenttoken.FieldExpiresAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field expires_at", values[i])
			} else if value.Valid {
				dt.ExpiresAt = value.Time
			}
		case documenttoken.ForeignKeys[0]:
			if value, ok := values[i].(*sql.NullScanner); !ok {
				return fmt.Errorf("unexpected type %T for field document_document_tokens", values[i])
			} else if value.Valid {
				dt.document_document_tokens = new(uuid.UUID)
				*dt.document_document_tokens = *value.S.(*uuid.UUID)
			}
		}
	}
	return nil
}

// QueryDocument queries the "document" edge of the DocumentToken entity.
func (dt *DocumentToken) QueryDocument() *DocumentQuery {
	return (&DocumentTokenClient{config: dt.config}).QueryDocument(dt)
}

// Update returns a builder for updating this DocumentToken.
// Note that you need to call DocumentToken.Unwrap() before calling this method if this DocumentToken
// was returned from a transaction, and the transaction was committed or rolled back.
func (dt *DocumentToken) Update() *DocumentTokenUpdateOne {
	return (&DocumentTokenClient{config: dt.config}).UpdateOne(dt)
}

// Unwrap unwraps the DocumentToken entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (dt *DocumentToken) Unwrap() *DocumentToken {
	_tx, ok := dt.config.driver.(*txDriver)
	if !ok {
		panic("ent: DocumentToken is not a transactional entity")
	}
	dt.config.driver = _tx.drv
	return dt
}

// String implements the fmt.Stringer.
func (dt *DocumentToken) String() string {
	var builder strings.Builder
	builder.WriteString("DocumentToken(")
	builder.WriteString(fmt.Sprintf("id=%v, ", dt.ID))
	builder.WriteString("created_at=")
	builder.WriteString(dt.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("updated_at=")
	builder.WriteString(dt.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("token=")
	builder.WriteString(fmt.Sprintf("%v", dt.Token))
	builder.WriteString(", ")
	builder.WriteString("uses=")
	builder.WriteString(fmt.Sprintf("%v", dt.Uses))
	builder.WriteString(", ")
	builder.WriteString("expires_at=")
	builder.WriteString(dt.ExpiresAt.Format(time.ANSIC))
	builder.WriteByte(')')
	return builder.String()
}

// DocumentTokens is a parsable slice of DocumentToken.
type DocumentTokens []*DocumentToken

func (dt DocumentTokens) config(cfg config) {
	for _i := range dt {
		dt[_i].config = cfg
	}
}
85
backend/ent/documenttoken/documenttoken.go
Normal file
@@ -0,0 +1,85 @@
// Code generated by ent, DO NOT EDIT.

package documenttoken

import (
	"time"

	"github.com/google/uuid"
)

const (
	// Label holds the string label denoting the documenttoken type in the database.
	Label = "document_token"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
	FieldUpdatedAt = "updated_at"
	// FieldToken holds the string denoting the token field in the database.
	FieldToken = "token"
	// FieldUses holds the string denoting the uses field in the database.
	FieldUses = "uses"
	// FieldExpiresAt holds the string denoting the expires_at field in the database.
	FieldExpiresAt = "expires_at"
	// EdgeDocument holds the string denoting the document edge name in mutations.
	EdgeDocument = "document"
	// Table holds the table name of the documenttoken in the database.
	Table = "document_tokens"
	// DocumentTable is the table that holds the document relation/edge.
	DocumentTable = "document_tokens"
	// DocumentInverseTable is the table name for the Document entity.
	// It exists in this package in order to avoid circular dependency with the "document" package.
	DocumentInverseTable = "documents"
	// DocumentColumn is the table column denoting the document relation/edge.
	DocumentColumn = "document_document_tokens"
)

// Columns holds all SQL columns for documenttoken fields.
var Columns = []string{
	FieldID,
	FieldCreatedAt,
	FieldUpdatedAt,
	FieldToken,
	FieldUses,
	FieldExpiresAt,
}

// ForeignKeys holds the SQL foreign-keys that are owned by the "document_tokens"
// table and are not defined as standalone fields in the schema.
var ForeignKeys = []string{
	"document_document_tokens",
}

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	for i := range ForeignKeys {
		if column == ForeignKeys[i] {
			return true
		}
	}
	return false
}

var (
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
	DefaultUpdatedAt func() time.Time
	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
	UpdateDefaultUpdatedAt func() time.Time
	// TokenValidator is a validator for the "token" field. It is called by the builders before save.
	TokenValidator func([]byte) error
	// DefaultUses holds the default value on creation for the "uses" field.
	DefaultUses int
	// DefaultExpiresAt holds the default value on creation for the "expires_at" field.
	DefaultExpiresAt func() time.Time
	// DefaultID holds the default value on creation for the "id" field.
	DefaultID func() uuid.UUID
)
498
backend/ent/documenttoken/where.go
Normal file
@@ -0,0 +1,498 @@
// Code generated by ent, DO NOT EDIT.

package documenttoken

import (
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// ID filters vertices based on their ID field.
func ID(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldID), id))
	})
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldID), id))
	})
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.In(s.C(FieldID), v...))
	})
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		v := make([]interface{}, len(ids))
		for i := range v {
			v[i] = ids[i]
		}
		s.Where(sql.NotIn(s.C(FieldID), v...))
	})
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldID), id))
	})
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldID), id))
	})
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldID), id))
	})
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id uuid.UUID) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldID), id))
	})
}

// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// Token applies equality check predicate on the "token" field. It's identical to TokenEQ.
func Token(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldToken), v))
	})
}

// Uses applies equality check predicate on the "uses" field. It's identical to UsesEQ.
func Uses(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUses), v))
	})
}

// ExpiresAt applies equality check predicate on the "expires_at" field. It's identical to ExpiresAtEQ.
func ExpiresAt(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldExpiresAt), v))
	})
}

// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
	})
}

// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldCreatedAt), v))
	})
}

// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldCreatedAt), v))
	})
}

// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
	})
}

// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldUpdatedAt), v))
	})
}

// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
	})
}

// TokenEQ applies the EQ predicate on the "token" field.
func TokenEQ(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldToken), v))
	})
}

// TokenNEQ applies the NEQ predicate on the "token" field.
func TokenNEQ(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldToken), v))
	})
}

// TokenIn applies the In predicate on the "token" field.
func TokenIn(vs ...[]byte) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldToken), v...))
	})
}

// TokenNotIn applies the NotIn predicate on the "token" field.
func TokenNotIn(vs ...[]byte) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldToken), v...))
	})
}

// TokenGT applies the GT predicate on the "token" field.
func TokenGT(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldToken), v))
	})
}

// TokenGTE applies the GTE predicate on the "token" field.
func TokenGTE(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldToken), v))
	})
}

// TokenLT applies the LT predicate on the "token" field.
func TokenLT(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldToken), v))
	})
}

// TokenLTE applies the LTE predicate on the "token" field.
func TokenLTE(v []byte) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldToken), v))
	})
}

// UsesEQ applies the EQ predicate on the "uses" field.
func UsesEQ(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldUses), v))
	})
}

// UsesNEQ applies the NEQ predicate on the "uses" field.
func UsesNEQ(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldUses), v))
	})
}

// UsesIn applies the In predicate on the "uses" field.
func UsesIn(vs ...int) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldUses), v...))
	})
}

// UsesNotIn applies the NotIn predicate on the "uses" field.
func UsesNotIn(vs ...int) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldUses), v...))
	})
}

// UsesGT applies the GT predicate on the "uses" field.
func UsesGT(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldUses), v))
	})
}

// UsesGTE applies the GTE predicate on the "uses" field.
func UsesGTE(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldUses), v))
	})
}

// UsesLT applies the LT predicate on the "uses" field.
func UsesLT(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldUses), v))
	})
}

// UsesLTE applies the LTE predicate on the "uses" field.
func UsesLTE(v int) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldUses), v))
	})
}

// ExpiresAtEQ applies the EQ predicate on the "expires_at" field.
func ExpiresAtEQ(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C(FieldExpiresAt), v))
	})
}

// ExpiresAtNEQ applies the NEQ predicate on the "expires_at" field.
func ExpiresAtNEQ(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NEQ(s.C(FieldExpiresAt), v))
	})
}

// ExpiresAtIn applies the In predicate on the "expires_at" field.
func ExpiresAtIn(vs ...time.Time) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.In(s.C(FieldExpiresAt), v...))
	})
}

// ExpiresAtNotIn applies the NotIn predicate on the "expires_at" field.
func ExpiresAtNotIn(vs ...time.Time) predicate.DocumentToken {
	v := make([]interface{}, len(vs))
	for i := range v {
		v[i] = vs[i]
	}
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.NotIn(s.C(FieldExpiresAt), v...))
	})
}

// ExpiresAtGT applies the GT predicate on the "expires_at" field.
func ExpiresAtGT(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GT(s.C(FieldExpiresAt), v))
	})
}

// ExpiresAtGTE applies the GTE predicate on the "expires_at" field.
func ExpiresAtGTE(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.GTE(s.C(FieldExpiresAt), v))
	})
}

// ExpiresAtLT applies the LT predicate on the "expires_at" field.
func ExpiresAtLT(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LT(s.C(FieldExpiresAt), v))
	})
}

// ExpiresAtLTE applies the LTE predicate on the "expires_at" field.
func ExpiresAtLTE(v time.Time) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s.Where(sql.LTE(s.C(FieldExpiresAt), v))
	})
}

// HasDocument applies the HasEdge predicate on the "document" edge.
func HasDocument() predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(DocumentTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
		)
		sqlgraph.HasNeighbors(s, step)
	})
}

// HasDocumentWith applies the HasEdge predicate on the "document" edge with a given conditions (other predicates).
func HasDocumentWith(preds ...predicate.Document) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		step := sqlgraph.NewStep(
			sqlgraph.From(Table, FieldID),
			sqlgraph.To(DocumentInverseTable, FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
		)
		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
			for _, p := range preds {
				p(s)
			}
		})
	})
}

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.DocumentToken) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s1 := s.Clone().SetP(nil)
		for _, p := range predicates {
			p(s1)
		}
		s.Where(s1.P())
	})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.DocumentToken) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		s1 := s.Clone().SetP(nil)
		for i, p := range predicates {
			if i > 0 {
				s1.Or()
			}
			p(s1)
		}
		s.Where(s1.P())
	})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.DocumentToken) predicate.DocumentToken {
	return predicate.DocumentToken(func(s *sql.Selector) {
		p(s.Not())
	})
}
418
backend/ent/documenttoken_create.go
Normal file
@@ -0,0 +1,418 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"errors"
	"fmt"
	"time"

	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
)

// DocumentTokenCreate is the builder for creating a DocumentToken entity.
type DocumentTokenCreate struct {
	config
	mutation *DocumentTokenMutation
	hooks    []Hook
}

// SetCreatedAt sets the "created_at" field.
func (dtc *DocumentTokenCreate) SetCreatedAt(t time.Time) *DocumentTokenCreate {
	dtc.mutation.SetCreatedAt(t)
	return dtc
}

// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
func (dtc *DocumentTokenCreate) SetNillableCreatedAt(t *time.Time) *DocumentTokenCreate {
	if t != nil {
		dtc.SetCreatedAt(*t)
	}
	return dtc
}

// SetUpdatedAt sets the "updated_at" field.
func (dtc *DocumentTokenCreate) SetUpdatedAt(t time.Time) *DocumentTokenCreate {
	dtc.mutation.SetUpdatedAt(t)
	return dtc
}

// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
func (dtc *DocumentTokenCreate) SetNillableUpdatedAt(t *time.Time) *DocumentTokenCreate {
	if t != nil {
		dtc.SetUpdatedAt(*t)
	}
	return dtc
}

// SetToken sets the "token" field.
func (dtc *DocumentTokenCreate) SetToken(b []byte) *DocumentTokenCreate {
	dtc.mutation.SetToken(b)
	return dtc
}

// SetUses sets the "uses" field.
func (dtc *DocumentTokenCreate) SetUses(i int) *DocumentTokenCreate {
	dtc.mutation.SetUses(i)
	return dtc
}

// SetNillableUses sets the "uses" field if the given value is not nil.
func (dtc *DocumentTokenCreate) SetNillableUses(i *int) *DocumentTokenCreate {
	if i != nil {
		dtc.SetUses(*i)
	}
	return dtc
}

// SetExpiresAt sets the "expires_at" field.
func (dtc *DocumentTokenCreate) SetExpiresAt(t time.Time) *DocumentTokenCreate {
	dtc.mutation.SetExpiresAt(t)
	return dtc
}

// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil.
func (dtc *DocumentTokenCreate) SetNillableExpiresAt(t *time.Time) *DocumentTokenCreate {
	if t != nil {
		dtc.SetExpiresAt(*t)
	}
	return dtc
}

// SetID sets the "id" field.
func (dtc *DocumentTokenCreate) SetID(u uuid.UUID) *DocumentTokenCreate {
	dtc.mutation.SetID(u)
	return dtc
}

// SetNillableID sets the "id" field if the given value is not nil.
func (dtc *DocumentTokenCreate) SetNillableID(u *uuid.UUID) *DocumentTokenCreate {
	if u != nil {
		dtc.SetID(*u)
	}
	return dtc
}

// SetDocumentID sets the "document" edge to the Document entity by ID.
func (dtc *DocumentTokenCreate) SetDocumentID(id uuid.UUID) *DocumentTokenCreate {
	dtc.mutation.SetDocumentID(id)
	return dtc
}

// SetNillableDocumentID sets the "document" edge to the Document entity by ID if the given value is not nil.
func (dtc *DocumentTokenCreate) SetNillableDocumentID(id *uuid.UUID) *DocumentTokenCreate {
	if id != nil {
		dtc = dtc.SetDocumentID(*id)
	}
	return dtc
}

// SetDocument sets the "document" edge to the Document entity.
func (dtc *DocumentTokenCreate) SetDocument(d *Document) *DocumentTokenCreate {
	return dtc.SetDocumentID(d.ID)
}

// Mutation returns the DocumentTokenMutation object of the builder.
func (dtc *DocumentTokenCreate) Mutation() *DocumentTokenMutation {
	return dtc.mutation
}

// Save creates the DocumentToken in the database.
func (dtc *DocumentTokenCreate) Save(ctx context.Context) (*DocumentToken, error) {
	var (
		err  error
		node *DocumentToken
	)
	dtc.defaults()
	if len(dtc.hooks) == 0 {
		if err = dtc.check(); err != nil {
			return nil, err
		}
		node, err = dtc.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentTokenMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = dtc.check(); err != nil {
				return nil, err
			}
			dtc.mutation = mutation
			if node, err = dtc.sqlSave(ctx); err != nil {
				return nil, err
			}
			mutation.id = &node.ID
			mutation.done = true
			return node, err
		})
		for i := len(dtc.hooks) - 1; i >= 0; i-- {
			if dtc.hooks[i] == nil {
				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = dtc.hooks[i](mut)
		}
		v, err := mut.Mutate(ctx, dtc.mutation)
		if err != nil {
			return nil, err
		}
		nv, ok := v.(*DocumentToken)
		if !ok {
			return nil, fmt.Errorf("unexpected node type %T returned from DocumentTokenMutation", v)
		}
		node = nv
	}
	return node, err
}

// SaveX calls Save and panics if Save returns an error.
func (dtc *DocumentTokenCreate) SaveX(ctx context.Context) *DocumentToken {
	v, err := dtc.Save(ctx)
	if err != nil {
		panic(err)
	}
	return v
}

// Exec executes the query.
func (dtc *DocumentTokenCreate) Exec(ctx context.Context) error {
	_, err := dtc.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (dtc *DocumentTokenCreate) ExecX(ctx context.Context) {
|
||||||
|
if err := dtc.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// defaults sets the default values of the builder before save.
|
||||||
|
func (dtc *DocumentTokenCreate) defaults() {
|
||||||
|
if _, ok := dtc.mutation.CreatedAt(); !ok {
|
||||||
|
v := documenttoken.DefaultCreatedAt()
|
||||||
|
dtc.mutation.SetCreatedAt(v)
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.UpdatedAt(); !ok {
|
||||||
|
v := documenttoken.DefaultUpdatedAt()
|
||||||
|
dtc.mutation.SetUpdatedAt(v)
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.Uses(); !ok {
|
||||||
|
v := documenttoken.DefaultUses
|
||||||
|
dtc.mutation.SetUses(v)
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.ExpiresAt(); !ok {
|
||||||
|
v := documenttoken.DefaultExpiresAt()
|
||||||
|
dtc.mutation.SetExpiresAt(v)
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.ID(); !ok {
|
||||||
|
v := documenttoken.DefaultID()
|
||||||
|
dtc.mutation.SetID(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check runs all checks and user-defined validators on the builder.
|
||||||
|
func (dtc *DocumentTokenCreate) check() error {
|
||||||
|
if _, ok := dtc.mutation.CreatedAt(); !ok {
|
||||||
|
return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "DocumentToken.created_at"`)}
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.UpdatedAt(); !ok {
|
||||||
|
return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "DocumentToken.updated_at"`)}
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.Token(); !ok {
|
||||||
|
return &ValidationError{Name: "token", err: errors.New(`ent: missing required field "DocumentToken.token"`)}
|
||||||
|
}
|
||||||
|
if v, ok := dtc.mutation.Token(); ok {
|
||||||
|
if err := documenttoken.TokenValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "token", err: fmt.Errorf(`ent: validator failed for field "DocumentToken.token": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.Uses(); !ok {
|
||||||
|
return &ValidationError{Name: "uses", err: errors.New(`ent: missing required field "DocumentToken.uses"`)}
|
||||||
|
}
|
||||||
|
if _, ok := dtc.mutation.ExpiresAt(); !ok {
|
||||||
|
return &ValidationError{Name: "expires_at", err: errors.New(`ent: missing required field "DocumentToken.expires_at"`)}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (dtc *DocumentTokenCreate) sqlSave(ctx context.Context) (*DocumentToken, error) {
|
||||||
|
_node, _spec := dtc.createSpec()
|
||||||
|
if err := sqlgraph.CreateNode(ctx, dtc.driver, _spec); err != nil {
|
||||||
|
if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if _spec.ID.Value != nil {
|
||||||
|
if id, ok := _spec.ID.Value.(*uuid.UUID); ok {
|
||||||
|
_node.ID = *id
|
||||||
|
} else if err := _node.ID.Scan(_spec.ID.Value); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return _node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (dtc *DocumentTokenCreate) createSpec() (*DocumentToken, *sqlgraph.CreateSpec) {
|
||||||
|
var (
|
||||||
|
_node = &DocumentToken{config: dtc.config}
|
||||||
|
_spec = &sqlgraph.CreateSpec{
|
||||||
|
Table: documenttoken.Table,
|
||||||
|
ID: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: documenttoken.FieldID,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if id, ok := dtc.mutation.ID(); ok {
|
||||||
|
_node.ID = id
|
||||||
|
_spec.ID.Value = &id
|
||||||
|
}
|
||||||
|
if value, ok := dtc.mutation.CreatedAt(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: documenttoken.FieldCreatedAt,
|
||||||
|
})
|
||||||
|
_node.CreatedAt = value
|
||||||
|
}
|
||||||
|
if value, ok := dtc.mutation.UpdatedAt(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: documenttoken.FieldUpdatedAt,
|
||||||
|
})
|
||||||
|
_node.UpdatedAt = value
|
||||||
|
}
|
||||||
|
if value, ok := dtc.mutation.Token(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeBytes,
|
||||||
|
Value: value,
|
||||||
|
Column: documenttoken.FieldToken,
|
||||||
|
})
|
||||||
|
_node.Token = value
|
||||||
|
}
|
||||||
|
if value, ok := dtc.mutation.Uses(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeInt,
|
||||||
|
Value: value,
|
||||||
|
Column: documenttoken.FieldUses,
|
||||||
|
})
|
||||||
|
_node.Uses = value
|
||||||
|
}
|
||||||
|
if value, ok := dtc.mutation.ExpiresAt(); ok {
|
||||||
|
_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: documenttoken.FieldExpiresAt,
|
||||||
|
})
|
||||||
|
_node.ExpiresAt = value
|
||||||
|
}
|
||||||
|
if nodes := dtc.mutation.DocumentIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.M2O,
|
||||||
|
Inverse: true,
|
||||||
|
Table: documenttoken.DocumentTable,
|
||||||
|
Columns: []string{documenttoken.DocumentColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: document.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_node.document_document_tokens = &nodes[0]
|
||||||
|
_spec.Edges = append(_spec.Edges, edge)
|
||||||
|
}
|
||||||
|
return _node, _spec
|
||||||
|
}
|
||||||
|
|
||||||
|
// DocumentTokenCreateBulk is the builder for creating many DocumentToken entities in bulk.
|
||||||
|
type DocumentTokenCreateBulk struct {
|
||||||
|
config
|
||||||
|
builders []*DocumentTokenCreate
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save creates the DocumentToken entities in the database.
|
||||||
|
func (dtcb *DocumentTokenCreateBulk) Save(ctx context.Context) ([]*DocumentToken, error) {
|
||||||
|
specs := make([]*sqlgraph.CreateSpec, len(dtcb.builders))
|
||||||
|
nodes := make([]*DocumentToken, len(dtcb.builders))
|
||||||
|
mutators := make([]Mutator, len(dtcb.builders))
|
||||||
|
for i := range dtcb.builders {
|
||||||
|
func(i int, root context.Context) {
|
||||||
|
builder := dtcb.builders[i]
|
||||||
|
builder.defaults()
|
||||||
|
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
|
||||||
|
mutation, ok := m.(*DocumentTokenMutation)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T", m)
|
||||||
|
}
|
||||||
|
if err := builder.check(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
builder.mutation = mutation
|
||||||
|
nodes[i], specs[i] = builder.createSpec()
|
||||||
|
var err error
|
||||||
|
if i < len(mutators)-1 {
|
||||||
|
_, err = mutators[i+1].Mutate(root, dtcb.builders[i+1].mutation)
|
||||||
|
} else {
|
||||||
|
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
|
||||||
|
// Invoke the actual operation on the latest mutation in the chain.
|
||||||
|
if err = sqlgraph.BatchCreate(ctx, dtcb.driver, spec); err != nil {
|
||||||
|
if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
mutation.id = &nodes[i].ID
|
||||||
|
mutation.done = true
|
||||||
|
return nodes[i], nil
|
||||||
|
})
|
||||||
|
for i := len(builder.hooks) - 1; i >= 0; i-- {
|
||||||
|
mut = builder.hooks[i](mut)
|
||||||
|
}
|
||||||
|
mutators[i] = mut
|
||||||
|
}(i, ctx)
|
||||||
|
}
|
||||||
|
if len(mutators) > 0 {
|
||||||
|
if _, err := mutators[0].Mutate(ctx, dtcb.builders[0].mutation); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (dtcb *DocumentTokenCreateBulk) SaveX(ctx context.Context) []*DocumentToken {
|
||||||
|
v, err := dtcb.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query.
|
||||||
|
func (dtcb *DocumentTokenCreateBulk) Exec(ctx context.Context) error {
|
||||||
|
_, err := dtcb.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (dtcb *DocumentTokenCreateBulk) ExecX(ctx context.Context) {
|
||||||
|
if err := dtcb.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
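Taken together, this file yields the usual fluent create builder. A minimal sketch of creating a token, assuming an *ent.Client named client and a document UUID docID are in scope; created_at, updated_at, uses, and id fall back to defaults() when unset:

token, err := client.DocumentToken.Create().
	SetToken([]byte("opaque-token-bytes")). // hypothetical token value
	SetExpiresAt(time.Now().Add(30 * time.Minute)).
	SetDocumentID(docID). // attaches the "document" edge by ID
	Save(ctx)
if err != nil {
	// constraint violations surface as *ConstraintError per sqlSave above
	return err
}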
115 backend/ent/documenttoken_delete.go Normal file
@@ -0,0 +1,115 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"fmt"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// DocumentTokenDelete is the builder for deleting a DocumentToken entity.
type DocumentTokenDelete struct {
	config
	hooks    []Hook
	mutation *DocumentTokenMutation
}

// Where appends a list predicates to the DocumentTokenDelete builder.
func (dtd *DocumentTokenDelete) Where(ps ...predicate.DocumentToken) *DocumentTokenDelete {
	dtd.mutation.Where(ps...)
	return dtd
}

// Exec executes the deletion query and returns how many vertices were deleted.
func (dtd *DocumentTokenDelete) Exec(ctx context.Context) (int, error) {
	var (
		err      error
		affected int
	)
	if len(dtd.hooks) == 0 {
		affected, err = dtd.sqlExec(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentTokenMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			dtd.mutation = mutation
			affected, err = dtd.sqlExec(ctx)
			mutation.done = true
			return affected, err
		})
		for i := len(dtd.hooks) - 1; i >= 0; i-- {
			if dtd.hooks[i] == nil {
				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = dtd.hooks[i](mut)
		}
		if _, err := mut.Mutate(ctx, dtd.mutation); err != nil {
			return 0, err
		}
	}
	return affected, err
}

// ExecX is like Exec, but panics if an error occurs.
func (dtd *DocumentTokenDelete) ExecX(ctx context.Context) int {
	n, err := dtd.Exec(ctx)
	if err != nil {
		panic(err)
	}
	return n
}

func (dtd *DocumentTokenDelete) sqlExec(ctx context.Context) (int, error) {
	_spec := &sqlgraph.DeleteSpec{
		Node: &sqlgraph.NodeSpec{
			Table: documenttoken.Table,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: documenttoken.FieldID,
			},
		},
	}
	if ps := dtd.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	affected, err := sqlgraph.DeleteNodes(ctx, dtd.driver, _spec)
	if err != nil && sqlgraph.IsConstraintError(err) {
		err = &ConstraintError{msg: err.Error(), wrap: err}
	}
	return affected, err
}

// DocumentTokenDeleteOne is the builder for deleting a single DocumentToken entity.
type DocumentTokenDeleteOne struct {
	dtd *DocumentTokenDelete
}

// Exec executes the deletion query.
func (dtdo *DocumentTokenDeleteOne) Exec(ctx context.Context) error {
	n, err := dtdo.dtd.Exec(ctx)
	switch {
	case err != nil:
		return err
	case n == 0:
		return &NotFoundError{documenttoken.Label}
	default:
		return nil
	}
}

// ExecX is like Exec, but panics if an error occurs.
func (dtdo *DocumentTokenDeleteOne) ExecX(ctx context.Context) {
	dtdo.dtd.ExecX(ctx)
}
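The delete builder follows the same Where/Exec shape. A sketch of pruning expired tokens, again assuming a client in scope and the conventional ent-generated ExpiresAtLT predicate (not shown in this diff):

n, err := client.DocumentToken.Delete().
	Where(documenttoken.ExpiresAtLT(time.Now())). // assumed generated predicate
	Exec(ctx) // n is the number of rows deleted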
611 backend/ent/documenttoken_query.go Normal file
@@ -0,0 +1,611 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"fmt"
	"math"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// DocumentTokenQuery is the builder for querying DocumentToken entities.
type DocumentTokenQuery struct {
	config
	limit        *int
	offset       *int
	unique       *bool
	order        []OrderFunc
	fields       []string
	predicates   []predicate.DocumentToken
	withDocument *DocumentQuery
	withFKs      bool
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}

// Where adds a new predicate for the DocumentTokenQuery builder.
func (dtq *DocumentTokenQuery) Where(ps ...predicate.DocumentToken) *DocumentTokenQuery {
	dtq.predicates = append(dtq.predicates, ps...)
	return dtq
}

// Limit adds a limit step to the query.
func (dtq *DocumentTokenQuery) Limit(limit int) *DocumentTokenQuery {
	dtq.limit = &limit
	return dtq
}

// Offset adds an offset step to the query.
func (dtq *DocumentTokenQuery) Offset(offset int) *DocumentTokenQuery {
	dtq.offset = &offset
	return dtq
}

// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (dtq *DocumentTokenQuery) Unique(unique bool) *DocumentTokenQuery {
	dtq.unique = &unique
	return dtq
}

// Order adds an order step to the query.
func (dtq *DocumentTokenQuery) Order(o ...OrderFunc) *DocumentTokenQuery {
	dtq.order = append(dtq.order, o...)
	return dtq
}

// QueryDocument chains the current query on the "document" edge.
func (dtq *DocumentTokenQuery) QueryDocument() *DocumentQuery {
	query := &DocumentQuery{config: dtq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := dtq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := dtq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(documenttoken.Table, documenttoken.FieldID, selector),
			sqlgraph.To(document.Table, document.FieldID),
			sqlgraph.Edge(sqlgraph.M2O, true, documenttoken.DocumentTable, documenttoken.DocumentColumn),
		)
		fromU = sqlgraph.SetNeighbors(dtq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// First returns the first DocumentToken entity from the query.
// Returns a *NotFoundError when no DocumentToken was found.
func (dtq *DocumentTokenQuery) First(ctx context.Context) (*DocumentToken, error) {
	nodes, err := dtq.Limit(1).All(ctx)
	if err != nil {
		return nil, err
	}
	if len(nodes) == 0 {
		return nil, &NotFoundError{documenttoken.Label}
	}
	return nodes[0], nil
}

// FirstX is like First, but panics if an error occurs.
func (dtq *DocumentTokenQuery) FirstX(ctx context.Context) *DocumentToken {
	node, err := dtq.First(ctx)
	if err != nil && !IsNotFound(err) {
		panic(err)
	}
	return node
}

// FirstID returns the first DocumentToken ID from the query.
// Returns a *NotFoundError when no DocumentToken ID was found.
func (dtq *DocumentTokenQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
	var ids []uuid.UUID
	if ids, err = dtq.Limit(1).IDs(ctx); err != nil {
		return
	}
	if len(ids) == 0 {
		err = &NotFoundError{documenttoken.Label}
		return
	}
	return ids[0], nil
}

// FirstIDX is like FirstID, but panics if an error occurs.
func (dtq *DocumentTokenQuery) FirstIDX(ctx context.Context) uuid.UUID {
	id, err := dtq.FirstID(ctx)
	if err != nil && !IsNotFound(err) {
		panic(err)
	}
	return id
}

// Only returns a single DocumentToken entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one DocumentToken entity is found.
// Returns a *NotFoundError when no DocumentToken entities are found.
func (dtq *DocumentTokenQuery) Only(ctx context.Context) (*DocumentToken, error) {
	nodes, err := dtq.Limit(2).All(ctx)
	if err != nil {
		return nil, err
	}
	switch len(nodes) {
	case 1:
		return nodes[0], nil
	case 0:
		return nil, &NotFoundError{documenttoken.Label}
	default:
		return nil, &NotSingularError{documenttoken.Label}
	}
}

// OnlyX is like Only, but panics if an error occurs.
func (dtq *DocumentTokenQuery) OnlyX(ctx context.Context) *DocumentToken {
	node, err := dtq.Only(ctx)
	if err != nil {
		panic(err)
	}
	return node
}

// OnlyID is like Only, but returns the only DocumentToken ID in the query.
// Returns a *NotSingularError when more than one DocumentToken ID is found.
// Returns a *NotFoundError when no entities are found.
func (dtq *DocumentTokenQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
	var ids []uuid.UUID
	if ids, err = dtq.Limit(2).IDs(ctx); err != nil {
		return
	}
	switch len(ids) {
	case 1:
		id = ids[0]
	case 0:
		err = &NotFoundError{documenttoken.Label}
	default:
		err = &NotSingularError{documenttoken.Label}
	}
	return
}

// OnlyIDX is like OnlyID, but panics if an error occurs.
func (dtq *DocumentTokenQuery) OnlyIDX(ctx context.Context) uuid.UUID {
	id, err := dtq.OnlyID(ctx)
	if err != nil {
		panic(err)
	}
	return id
}

// All executes the query and returns a list of DocumentTokens.
func (dtq *DocumentTokenQuery) All(ctx context.Context) ([]*DocumentToken, error) {
	if err := dtq.prepareQuery(ctx); err != nil {
		return nil, err
	}
	return dtq.sqlAll(ctx)
}

// AllX is like All, but panics if an error occurs.
func (dtq *DocumentTokenQuery) AllX(ctx context.Context) []*DocumentToken {
	nodes, err := dtq.All(ctx)
	if err != nil {
		panic(err)
	}
	return nodes
}

// IDs executes the query and returns a list of DocumentToken IDs.
func (dtq *DocumentTokenQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
	var ids []uuid.UUID
	if err := dtq.Select(documenttoken.FieldID).Scan(ctx, &ids); err != nil {
		return nil, err
	}
	return ids, nil
}

// IDsX is like IDs, but panics if an error occurs.
func (dtq *DocumentTokenQuery) IDsX(ctx context.Context) []uuid.UUID {
	ids, err := dtq.IDs(ctx)
	if err != nil {
		panic(err)
	}
	return ids
}

// Count returns the count of the given query.
func (dtq *DocumentTokenQuery) Count(ctx context.Context) (int, error) {
	if err := dtq.prepareQuery(ctx); err != nil {
		return 0, err
	}
	return dtq.sqlCount(ctx)
}

// CountX is like Count, but panics if an error occurs.
func (dtq *DocumentTokenQuery) CountX(ctx context.Context) int {
	count, err := dtq.Count(ctx)
	if err != nil {
		panic(err)
	}
	return count
}

// Exist returns true if the query has elements in the graph.
func (dtq *DocumentTokenQuery) Exist(ctx context.Context) (bool, error) {
	if err := dtq.prepareQuery(ctx); err != nil {
		return false, err
	}
	return dtq.sqlExist(ctx)
}

// ExistX is like Exist, but panics if an error occurs.
func (dtq *DocumentTokenQuery) ExistX(ctx context.Context) bool {
	exist, err := dtq.Exist(ctx)
	if err != nil {
		panic(err)
	}
	return exist
}

// Clone returns a duplicate of the DocumentTokenQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (dtq *DocumentTokenQuery) Clone() *DocumentTokenQuery {
	if dtq == nil {
		return nil
	}
	return &DocumentTokenQuery{
		config:       dtq.config,
		limit:        dtq.limit,
		offset:       dtq.offset,
		order:        append([]OrderFunc{}, dtq.order...),
		predicates:   append([]predicate.DocumentToken{}, dtq.predicates...),
		withDocument: dtq.withDocument.Clone(),
		// clone intermediate query.
		sql:    dtq.sql.Clone(),
		path:   dtq.path,
		unique: dtq.unique,
	}
}

// WithDocument tells the query-builder to eager-load the nodes that are connected to
// the "document" edge. The optional arguments are used to configure the query builder of the edge.
func (dtq *DocumentTokenQuery) WithDocument(opts ...func(*DocumentQuery)) *DocumentTokenQuery {
	query := &DocumentQuery{config: dtq.config}
	for _, opt := range opts {
		opt(query)
	}
	dtq.withDocument = query
	return dtq
}

// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
//	var v []struct {
//		CreatedAt time.Time `json:"created_at,omitempty"`
//		Count int `json:"count,omitempty"`
//	}
//
//	client.DocumentToken.Query().
//		GroupBy(documenttoken.FieldCreatedAt).
//		Aggregate(ent.Count()).
//		Scan(ctx, &v)
func (dtq *DocumentTokenQuery) GroupBy(field string, fields ...string) *DocumentTokenGroupBy {
	grbuild := &DocumentTokenGroupBy{config: dtq.config}
	grbuild.fields = append([]string{field}, fields...)
	grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) {
		if err := dtq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		return dtq.sqlQuery(ctx), nil
	}
	grbuild.label = documenttoken.Label
	grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan
	return grbuild
}

// Select allows the selection one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
//	var v []struct {
//		CreatedAt time.Time `json:"created_at,omitempty"`
//	}
//
//	client.DocumentToken.Query().
//		Select(documenttoken.FieldCreatedAt).
//		Scan(ctx, &v)
func (dtq *DocumentTokenQuery) Select(fields ...string) *DocumentTokenSelect {
	dtq.fields = append(dtq.fields, fields...)
	selbuild := &DocumentTokenSelect{DocumentTokenQuery: dtq}
	selbuild.label = documenttoken.Label
	selbuild.flds, selbuild.scan = &dtq.fields, selbuild.Scan
	return selbuild
}

func (dtq *DocumentTokenQuery) prepareQuery(ctx context.Context) error {
	for _, f := range dtq.fields {
		if !documenttoken.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
		}
	}
	if dtq.path != nil {
		prev, err := dtq.path(ctx)
		if err != nil {
			return err
		}
		dtq.sql = prev
	}
	return nil
}

func (dtq *DocumentTokenQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*DocumentToken, error) {
	var (
		nodes       = []*DocumentToken{}
		withFKs     = dtq.withFKs
		_spec       = dtq.querySpec()
		loadedTypes = [1]bool{
			dtq.withDocument != nil,
		}
	)
	if dtq.withDocument != nil {
		withFKs = true
	}
	if withFKs {
		_spec.Node.Columns = append(_spec.Node.Columns, documenttoken.ForeignKeys...)
	}
	_spec.ScanValues = func(columns []string) ([]interface{}, error) {
		return (*DocumentToken).scanValues(nil, columns)
	}
	_spec.Assign = func(columns []string, values []interface{}) error {
		node := &DocumentToken{config: dtq.config}
		nodes = append(nodes, node)
		node.Edges.loadedTypes = loadedTypes
		return node.assignValues(columns, values)
	}
	for i := range hooks {
		hooks[i](ctx, _spec)
	}
	if err := sqlgraph.QueryNodes(ctx, dtq.driver, _spec); err != nil {
		return nil, err
	}
	if len(nodes) == 0 {
		return nodes, nil
	}
	if query := dtq.withDocument; query != nil {
		if err := dtq.loadDocument(ctx, query, nodes, nil,
			func(n *DocumentToken, e *Document) { n.Edges.Document = e }); err != nil {
			return nil, err
		}
	}
	return nodes, nil
}

func (dtq *DocumentTokenQuery) loadDocument(ctx context.Context, query *DocumentQuery, nodes []*DocumentToken, init func(*DocumentToken), assign func(*DocumentToken, *Document)) error {
	ids := make([]uuid.UUID, 0, len(nodes))
	nodeids := make(map[uuid.UUID][]*DocumentToken)
	for i := range nodes {
		if nodes[i].document_document_tokens == nil {
			continue
		}
		fk := *nodes[i].document_document_tokens
		if _, ok := nodeids[fk]; !ok {
			ids = append(ids, fk)
		}
		nodeids[fk] = append(nodeids[fk], nodes[i])
	}
	query.Where(document.IDIn(ids...))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		nodes, ok := nodeids[n.ID]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "document_document_tokens" returned %v`, n.ID)
		}
		for i := range nodes {
			assign(nodes[i], n)
		}
	}
	return nil
}

func (dtq *DocumentTokenQuery) sqlCount(ctx context.Context) (int, error) {
	_spec := dtq.querySpec()
	_spec.Node.Columns = dtq.fields
	if len(dtq.fields) > 0 {
		_spec.Unique = dtq.unique != nil && *dtq.unique
	}
	return sqlgraph.CountNodes(ctx, dtq.driver, _spec)
}

func (dtq *DocumentTokenQuery) sqlExist(ctx context.Context) (bool, error) {
	n, err := dtq.sqlCount(ctx)
	if err != nil {
		return false, fmt.Errorf("ent: check existence: %w", err)
	}
	return n > 0, nil
}

func (dtq *DocumentTokenQuery) querySpec() *sqlgraph.QuerySpec {
	_spec := &sqlgraph.QuerySpec{
		Node: &sqlgraph.NodeSpec{
			Table:   documenttoken.Table,
			Columns: documenttoken.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: documenttoken.FieldID,
			},
		},
		From:   dtq.sql,
		Unique: true,
	}
	if unique := dtq.unique; unique != nil {
		_spec.Unique = *unique
	}
	if fields := dtq.fields; len(fields) > 0 {
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, documenttoken.FieldID)
		for i := range fields {
			if fields[i] != documenttoken.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
			}
		}
	}
	if ps := dtq.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if limit := dtq.limit; limit != nil {
		_spec.Limit = *limit
	}
	if offset := dtq.offset; offset != nil {
		_spec.Offset = *offset
	}
	if ps := dtq.order; len(ps) > 0 {
		_spec.Order = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	return _spec
}

func (dtq *DocumentTokenQuery) sqlQuery(ctx context.Context) *sql.Selector {
	builder := sql.Dialect(dtq.driver.Dialect())
	t1 := builder.Table(documenttoken.Table)
	columns := dtq.fields
	if len(columns) == 0 {
		columns = documenttoken.Columns
	}
	selector := builder.Select(t1.Columns(columns...)...).From(t1)
	if dtq.sql != nil {
		selector = dtq.sql
		selector.Select(selector.Columns(columns...)...)
	}
	if dtq.unique != nil && *dtq.unique {
		selector.Distinct()
	}
	for _, p := range dtq.predicates {
		p(selector)
	}
	for _, p := range dtq.order {
		p(selector)
	}
	if offset := dtq.offset; offset != nil {
		// limit is mandatory for offset clause. We start
		// with default value, and override it below if needed.
		selector.Offset(*offset).Limit(math.MaxInt32)
	}
	if limit := dtq.limit; limit != nil {
		selector.Limit(*limit)
	}
	return selector
}

// DocumentTokenGroupBy is the group-by builder for DocumentToken entities.
type DocumentTokenGroupBy struct {
	config
	selector
	fields []string
	fns    []AggregateFunc
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}

// Aggregate adds the given aggregation functions to the group-by query.
func (dtgb *DocumentTokenGroupBy) Aggregate(fns ...AggregateFunc) *DocumentTokenGroupBy {
	dtgb.fns = append(dtgb.fns, fns...)
	return dtgb
}

// Scan applies the group-by query and scans the result into the given value.
func (dtgb *DocumentTokenGroupBy) Scan(ctx context.Context, v interface{}) error {
	query, err := dtgb.path(ctx)
	if err != nil {
		return err
	}
	dtgb.sql = query
	return dtgb.sqlScan(ctx, v)
}

func (dtgb *DocumentTokenGroupBy) sqlScan(ctx context.Context, v interface{}) error {
	for _, f := range dtgb.fields {
		if !documenttoken.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)}
		}
	}
	selector := dtgb.sqlQuery()
	if err := selector.Err(); err != nil {
		return err
	}
	rows := &sql.Rows{}
	query, args := selector.Query()
	if err := dtgb.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}

func (dtgb *DocumentTokenGroupBy) sqlQuery() *sql.Selector {
	selector := dtgb.sql.Select()
	aggregation := make([]string, 0, len(dtgb.fns))
	for _, fn := range dtgb.fns {
		aggregation = append(aggregation, fn(selector))
	}
	// If no columns were selected in a custom aggregation function, the default
	// selection is the fields used for "group-by", and the aggregation functions.
	if len(selector.SelectedColumns()) == 0 {
		columns := make([]string, 0, len(dtgb.fields)+len(dtgb.fns))
		for _, f := range dtgb.fields {
			columns = append(columns, selector.C(f))
		}
		columns = append(columns, aggregation...)
		selector.Select(columns...)
	}
	return selector.GroupBy(selector.Columns(dtgb.fields...)...)
}

// DocumentTokenSelect is the builder for selecting fields of DocumentToken entities.
type DocumentTokenSelect struct {
	*DocumentTokenQuery
	selector
	// intermediate query (i.e. traversal path).
	sql *sql.Selector
}

// Scan applies the selector query and scans the result into the given value.
func (dts *DocumentTokenSelect) Scan(ctx context.Context, v interface{}) error {
	if err := dts.prepareQuery(ctx); err != nil {
		return err
	}
	dts.sql = dts.DocumentTokenQuery.sqlQuery(ctx)
	return dts.sqlScan(ctx, v)
}

func (dts *DocumentTokenSelect) sqlScan(ctx context.Context, v interface{}) error {
	rows := &sql.Rows{}
	query, args := dts.sql.Query()
	if err := dts.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}
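A sketch of the query builder in use, combining the Only terminator with WithDocument eager loading. documenttoken.ID is the conventional ent-generated ID predicate and is assumed here; tok.Edges.Document is populated by the loadDocument logic above:

tok, err := client.DocumentToken.Query().
	Where(documenttoken.ID(tokenID)). // assumed generated ID predicate
	WithDocument().                   // eager-load the parent document
	Only(ctx)
if err == nil {
	doc := tok.Edges.Document // set by the assign callback in sqlAll
	_ = doc
}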
582 backend/ent/documenttoken_update.go Normal file
@@ -0,0 +1,582 @@
// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"errors"
	"fmt"
	"time"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/predicate"
)

// DocumentTokenUpdate is the builder for updating DocumentToken entities.
type DocumentTokenUpdate struct {
	config
	hooks    []Hook
	mutation *DocumentTokenMutation
}

// Where appends a list predicates to the DocumentTokenUpdate builder.
func (dtu *DocumentTokenUpdate) Where(ps ...predicate.DocumentToken) *DocumentTokenUpdate {
	dtu.mutation.Where(ps...)
	return dtu
}

// SetUpdatedAt sets the "updated_at" field.
func (dtu *DocumentTokenUpdate) SetUpdatedAt(t time.Time) *DocumentTokenUpdate {
	dtu.mutation.SetUpdatedAt(t)
	return dtu
}

// SetToken sets the "token" field.
func (dtu *DocumentTokenUpdate) SetToken(b []byte) *DocumentTokenUpdate {
	dtu.mutation.SetToken(b)
	return dtu
}

// SetUses sets the "uses" field.
func (dtu *DocumentTokenUpdate) SetUses(i int) *DocumentTokenUpdate {
	dtu.mutation.ResetUses()
	dtu.mutation.SetUses(i)
	return dtu
}

// SetNillableUses sets the "uses" field if the given value is not nil.
func (dtu *DocumentTokenUpdate) SetNillableUses(i *int) *DocumentTokenUpdate {
	if i != nil {
		dtu.SetUses(*i)
	}
	return dtu
}

// AddUses adds i to the "uses" field.
func (dtu *DocumentTokenUpdate) AddUses(i int) *DocumentTokenUpdate {
	dtu.mutation.AddUses(i)
	return dtu
}

// SetExpiresAt sets the "expires_at" field.
func (dtu *DocumentTokenUpdate) SetExpiresAt(t time.Time) *DocumentTokenUpdate {
	dtu.mutation.SetExpiresAt(t)
	return dtu
}

// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil.
func (dtu *DocumentTokenUpdate) SetNillableExpiresAt(t *time.Time) *DocumentTokenUpdate {
	if t != nil {
		dtu.SetExpiresAt(*t)
	}
	return dtu
}

// SetDocumentID sets the "document" edge to the Document entity by ID.
func (dtu *DocumentTokenUpdate) SetDocumentID(id uuid.UUID) *DocumentTokenUpdate {
	dtu.mutation.SetDocumentID(id)
	return dtu
}

// SetNillableDocumentID sets the "document" edge to the Document entity by ID if the given value is not nil.
func (dtu *DocumentTokenUpdate) SetNillableDocumentID(id *uuid.UUID) *DocumentTokenUpdate {
	if id != nil {
		dtu = dtu.SetDocumentID(*id)
	}
	return dtu
}

// SetDocument sets the "document" edge to the Document entity.
func (dtu *DocumentTokenUpdate) SetDocument(d *Document) *DocumentTokenUpdate {
	return dtu.SetDocumentID(d.ID)
}

// Mutation returns the DocumentTokenMutation object of the builder.
func (dtu *DocumentTokenUpdate) Mutation() *DocumentTokenMutation {
	return dtu.mutation
}

// ClearDocument clears the "document" edge to the Document entity.
func (dtu *DocumentTokenUpdate) ClearDocument() *DocumentTokenUpdate {
	dtu.mutation.ClearDocument()
	return dtu
}

// Save executes the query and returns the number of nodes affected by the update operation.
func (dtu *DocumentTokenUpdate) Save(ctx context.Context) (int, error) {
	var (
		err      error
		affected int
	)
	dtu.defaults()
	if len(dtu.hooks) == 0 {
		if err = dtu.check(); err != nil {
			return 0, err
		}
		affected, err = dtu.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentTokenMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = dtu.check(); err != nil {
				return 0, err
			}
			dtu.mutation = mutation
			affected, err = dtu.sqlSave(ctx)
			mutation.done = true
			return affected, err
		})
		for i := len(dtu.hooks) - 1; i >= 0; i-- {
			if dtu.hooks[i] == nil {
				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = dtu.hooks[i](mut)
		}
		if _, err := mut.Mutate(ctx, dtu.mutation); err != nil {
			return 0, err
		}
	}
	return affected, err
}

// SaveX is like Save, but panics if an error occurs.
func (dtu *DocumentTokenUpdate) SaveX(ctx context.Context) int {
	affected, err := dtu.Save(ctx)
	if err != nil {
		panic(err)
	}
	return affected
}

// Exec executes the query.
func (dtu *DocumentTokenUpdate) Exec(ctx context.Context) error {
	_, err := dtu.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (dtu *DocumentTokenUpdate) ExecX(ctx context.Context) {
	if err := dtu.Exec(ctx); err != nil {
		panic(err)
	}
}
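// Usage sketch (not part of the generated file): decrementing the remaining
// uses on a single token via the additive AddUses setter. documenttoken.ID is
// the conventional ent-generated ID predicate and is assumed here; updated_at
// is refreshed by defaults() before the update runs.
//
//	n, err := client.DocumentToken.Update().
//		Where(documenttoken.ID(tokenID)).
//		AddUses(-1).
//		Save(ctx) // n reports how many rows were updated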
// defaults sets the default values of the builder before save.
func (dtu *DocumentTokenUpdate) defaults() {
	if _, ok := dtu.mutation.UpdatedAt(); !ok {
		v := documenttoken.UpdateDefaultUpdatedAt()
		dtu.mutation.SetUpdatedAt(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (dtu *DocumentTokenUpdate) check() error {
	if v, ok := dtu.mutation.Token(); ok {
		if err := documenttoken.TokenValidator(v); err != nil {
			return &ValidationError{Name: "token", err: fmt.Errorf(`ent: validator failed for field "DocumentToken.token": %w`, err)}
		}
	}
	return nil
}

func (dtu *DocumentTokenUpdate) sqlSave(ctx context.Context) (n int, err error) {
	_spec := &sqlgraph.UpdateSpec{
		Node: &sqlgraph.NodeSpec{
			Table:   documenttoken.Table,
			Columns: documenttoken.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: documenttoken.FieldID,
			},
		},
	}
	if ps := dtu.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if value, ok := dtu.mutation.UpdatedAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: documenttoken.FieldUpdatedAt,
		})
	}
	if value, ok := dtu.mutation.Token(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeBytes,
			Value:  value,
			Column: documenttoken.FieldToken,
		})
	}
	if value, ok := dtu.mutation.Uses(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: documenttoken.FieldUses,
		})
	}
	if value, ok := dtu.mutation.AddedUses(); ok {
		_spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: documenttoken.FieldUses,
		})
	}
	if value, ok := dtu.mutation.ExpiresAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: documenttoken.FieldExpiresAt,
		})
	}
	if dtu.mutation.DocumentCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   documenttoken.DocumentTable,
			Columns: []string{documenttoken.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := dtu.mutation.DocumentIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.M2O,
			Inverse: true,
			Table:   documenttoken.DocumentTable,
			Columns: []string{documenttoken.DocumentColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: document.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if n, err = sqlgraph.UpdateNodes(ctx, dtu.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{documenttoken.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return 0, err
	}
	return n, nil
}

// DocumentTokenUpdateOne is the builder for updating a single DocumentToken entity.
type DocumentTokenUpdateOne struct {
	config
	fields   []string
	hooks    []Hook
	mutation *DocumentTokenMutation
}

// SetUpdatedAt sets the "updated_at" field.
func (dtuo *DocumentTokenUpdateOne) SetUpdatedAt(t time.Time) *DocumentTokenUpdateOne {
	dtuo.mutation.SetUpdatedAt(t)
	return dtuo
}

// SetToken sets the "token" field.
func (dtuo *DocumentTokenUpdateOne) SetToken(b []byte) *DocumentTokenUpdateOne {
	dtuo.mutation.SetToken(b)
	return dtuo
}

// SetUses sets the "uses" field.
func (dtuo *DocumentTokenUpdateOne) SetUses(i int) *DocumentTokenUpdateOne {
	dtuo.mutation.ResetUses()
	dtuo.mutation.SetUses(i)
	return dtuo
}

// SetNillableUses sets the "uses" field if the given value is not nil.
func (dtuo *DocumentTokenUpdateOne) SetNillableUses(i *int) *DocumentTokenUpdateOne {
	if i != nil {
		dtuo.SetUses(*i)
	}
	return dtuo
}

// AddUses adds i to the "uses" field.
func (dtuo *DocumentTokenUpdateOne) AddUses(i int) *DocumentTokenUpdateOne {
	dtuo.mutation.AddUses(i)
	return dtuo
}

// SetExpiresAt sets the "expires_at" field.
func (dtuo *DocumentTokenUpdateOne) SetExpiresAt(t time.Time) *DocumentTokenUpdateOne {
	dtuo.mutation.SetExpiresAt(t)
	return dtuo
}

// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil.
func (dtuo *DocumentTokenUpdateOne) SetNillableExpiresAt(t *time.Time) *DocumentTokenUpdateOne {
	if t != nil {
		dtuo.SetExpiresAt(*t)
	}
	return dtuo
}

// SetDocumentID sets the "document" edge to the Document entity by ID.
func (dtuo *DocumentTokenUpdateOne) SetDocumentID(id uuid.UUID) *DocumentTokenUpdateOne {
	dtuo.mutation.SetDocumentID(id)
	return dtuo
}

// SetNillableDocumentID sets the "document" edge to the Document entity by ID if the given value is not nil.
func (dtuo *DocumentTokenUpdateOne) SetNillableDocumentID(id *uuid.UUID) *DocumentTokenUpdateOne {
	if id != nil {
		dtuo = dtuo.SetDocumentID(*id)
	}
	return dtuo
}

// SetDocument sets the "document" edge to the Document entity.
func (dtuo *DocumentTokenUpdateOne) SetDocument(d *Document) *DocumentTokenUpdateOne {
	return dtuo.SetDocumentID(d.ID)
}

// Mutation returns the DocumentTokenMutation object of the builder.
func (dtuo *DocumentTokenUpdateOne) Mutation() *DocumentTokenMutation {
	return dtuo.mutation
}

// ClearDocument clears the "document" edge to the Document entity.
func (dtuo *DocumentTokenUpdateOne) ClearDocument() *DocumentTokenUpdateOne {
	dtuo.mutation.ClearDocument()
	return dtuo
}

// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (dtuo *DocumentTokenUpdateOne) Select(field string, fields ...string) *DocumentTokenUpdateOne {
	dtuo.fields = append([]string{field}, fields...)
	return dtuo
}

// Save executes the query and returns the updated DocumentToken entity.
func (dtuo *DocumentTokenUpdateOne) Save(ctx context.Context) (*DocumentToken, error) {
	var (
		err  error
		node *DocumentToken
	)
	dtuo.defaults()
	if len(dtuo.hooks) == 0 {
		if err = dtuo.check(); err != nil {
			return nil, err
		}
		node, err = dtuo.sqlSave(ctx)
	} else {
		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
			mutation, ok := m.(*DocumentTokenMutation)
			if !ok {
				return nil, fmt.Errorf("unexpected mutation type %T", m)
			}
			if err = dtuo.check(); err != nil {
				return nil, err
			}
			dtuo.mutation = mutation
			node, err = dtuo.sqlSave(ctx)
			mutation.done = true
			return node, err
		})
		for i := len(dtuo.hooks) - 1; i >= 0; i-- {
			if dtuo.hooks[i] == nil {
				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
			}
			mut = dtuo.hooks[i](mut)
		}
		v, err := mut.Mutate(ctx, dtuo.mutation)
		if err != nil {
			return nil, err
		}
		nv, ok := v.(*DocumentToken)
		if !ok {
			return nil, fmt.Errorf("unexpected node type %T returned from DocumentTokenMutation", v)
		}
		node = nv
	}
	return node, err
}

// SaveX is like Save, but panics if an error occurs.
func (dtuo *DocumentTokenUpdateOne) SaveX(ctx context.Context) *DocumentToken {
	node, err := dtuo.Save(ctx)
	if err != nil {
		panic(err)
	}
	return node
}

// Exec executes the query on the entity.
func (dtuo *DocumentTokenUpdateOne) Exec(ctx context.Context) error {
	_, err := dtuo.Save(ctx)
	return err
}

// ExecX is like Exec, but panics if an error occurs.
func (dtuo *DocumentTokenUpdateOne) ExecX(ctx context.Context) {
	if err := dtuo.Exec(ctx); err != nil {
		panic(err)
	}
}

// defaults sets the default values of the builder before save.
func (dtuo *DocumentTokenUpdateOne) defaults() {
	if _, ok := dtuo.mutation.UpdatedAt(); !ok {
		v := documenttoken.UpdateDefaultUpdatedAt()
		dtuo.mutation.SetUpdatedAt(v)
	}
}

// check runs all checks and user-defined validators on the builder.
func (dtuo *DocumentTokenUpdateOne) check() error {
	if v, ok := dtuo.mutation.Token(); ok {
		if err := documenttoken.TokenValidator(v); err != nil {
			return &ValidationError{Name: "token", err: fmt.Errorf(`ent: validator failed for field "DocumentToken.token": %w`, err)}
		}
	}
	return nil
}

func (dtuo *DocumentTokenUpdateOne) sqlSave(ctx context.Context) (_node *DocumentToken, err error) {
	_spec := &sqlgraph.UpdateSpec{
		Node: &sqlgraph.NodeSpec{
			Table:   documenttoken.Table,
			Columns: documenttoken.Columns,
			ID: &sqlgraph.FieldSpec{
				Type:   field.TypeUUID,
				Column: documenttoken.FieldID,
			},
		},
	}
	id, ok := dtuo.mutation.ID()
	if !ok {
		return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "DocumentToken.id" for update`)}
	}
	_spec.Node.ID.Value = id
	if fields := dtuo.fields; len(fields) > 0 {
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, documenttoken.FieldID)
		for _, f := range fields {
			if !documenttoken.ValidColumn(f) {
				return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
			}
			if f != documenttoken.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, f)
			}
		}
	}
	if ps := dtuo.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if value, ok := dtuo.mutation.UpdatedAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeTime,
			Value:  value,
			Column: documenttoken.FieldUpdatedAt,
		})
	}
	if value, ok := dtuo.mutation.Token(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeBytes,
			Value:  value,
			Column: documenttoken.FieldToken,
		})
	}
	if value, ok := dtuo.mutation.Uses(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: documenttoken.FieldUses,
		})
	}
	if value, ok := dtuo.mutation.AddedUses(); ok {
		_spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: documenttoken.FieldUses,
		})
	}
	if value, ok := dtuo.mutation.ExpiresAt(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeTime,
|
||||||
|
Value: value,
|
||||||
|
Column: documenttoken.FieldExpiresAt,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if dtuo.mutation.DocumentCleared() {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.M2O,
|
||||||
|
Inverse: true,
|
||||||
|
Table: documenttoken.DocumentTable,
|
||||||
|
Columns: []string{documenttoken.DocumentColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: document.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||||
|
}
|
||||||
|
if nodes := dtuo.mutation.DocumentIDs(); len(nodes) > 0 {
|
||||||
|
edge := &sqlgraph.EdgeSpec{
|
||||||
|
Rel: sqlgraph.M2O,
|
||||||
|
Inverse: true,
|
||||||
|
Table: documenttoken.DocumentTable,
|
||||||
|
Columns: []string{documenttoken.DocumentColumn},
|
||||||
|
Bidi: false,
|
||||||
|
Target: &sqlgraph.EdgeTarget{
|
||||||
|
IDSpec: &sqlgraph.FieldSpec{
|
||||||
|
Type: field.TypeUUID,
|
||||||
|
Column: document.FieldID,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, k := range nodes {
|
||||||
|
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||||
|
}
|
||||||
|
_spec.Edges.Add = append(_spec.Edges.Add, edge)
|
||||||
|
}
|
||||||
|
_node = &DocumentToken{config: dtuo.config}
|
||||||
|
_spec.Assign = _node.assignValues
|
||||||
|
_spec.ScanValues = _node.scanValues
|
||||||
|
if err = sqlgraph.UpdateNode(ctx, dtuo.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{documenttoken.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return _node, nil
|
||||||
|
}
|
|
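The update-one builder above follows the usual ent pattern: stage field and edge changes on the builder, then commit with Save/Exec (or the panicking SaveX/ExecX variants). A minimal usage sketch, assuming a wired-up generated client from this module; the helper name and the client/ctx/id inputs are hypothetical, not part of this commit.

package example

import (
	"context"
	"time"

	"github.com/google/uuid"

	"github.com/hay-kot/content/backend/ent"
)

// renewDocumentToken is a hypothetical helper: it bumps the use counter and
// pushes out the expiry of an existing DocumentToken, returning the updated row.
func renewDocumentToken(ctx context.Context, client *ent.Client, id uuid.UUID) (*ent.DocumentToken, error) {
	return client.DocumentToken.
		UpdateOneID(id).
		AddUses(1).
		SetExpiresAt(time.Now().Add(24 * time.Hour)).
		Save(ctx)
}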
@@ -10,7 +10,10 @@ import (
 	"entgo.io/ent"
 	"entgo.io/ent/dialect/sql"
 	"entgo.io/ent/dialect/sql/sqlgraph"
+	"github.com/hay-kot/content/backend/ent/attachment"
 	"github.com/hay-kot/content/backend/ent/authtokens"
+	"github.com/hay-kot/content/backend/ent/document"
+	"github.com/hay-kot/content/backend/ent/documenttoken"
 	"github.com/hay-kot/content/backend/ent/group"
 	"github.com/hay-kot/content/backend/ent/item"
 	"github.com/hay-kot/content/backend/ent/itemfield"

@@ -37,13 +40,16 @@ type OrderFunc func(*sql.Selector)
 // columnChecker returns a function indicates if the column exists in the given column.
 func columnChecker(table string) func(string) error {
 	checks := map[string]func(string) bool{
-		authtokens.Table: authtokens.ValidColumn,
-		group.Table:      group.ValidColumn,
-		item.Table:       item.ValidColumn,
-		itemfield.Table:  itemfield.ValidColumn,
-		label.Table:      label.ValidColumn,
-		location.Table:   location.ValidColumn,
-		user.Table:       user.ValidColumn,
+		attachment.Table:    attachment.ValidColumn,
+		authtokens.Table:    authtokens.ValidColumn,
+		document.Table:      document.ValidColumn,
+		documenttoken.Table: documenttoken.ValidColumn,
+		group.Table:         group.ValidColumn,
+		item.Table:          item.ValidColumn,
+		itemfield.Table:     itemfield.ValidColumn,
+		label.Table:         label.ValidColumn,
+		location.Table:      location.ValidColumn,
+		user.Table:          user.ValidColumn,
 	}
 	check, ok := checks[table]
 	if !ok {
@@ -40,9 +40,11 @@ type GroupEdges struct {
 	Items []*Item `json:"items,omitempty"`
 	// Labels holds the value of the labels edge.
 	Labels []*Label `json:"labels,omitempty"`
+	// Documents holds the value of the documents edge.
+	Documents []*Document `json:"documents,omitempty"`
 	// loadedTypes holds the information for reporting if a
 	// type was loaded (or requested) in eager-loading or not.
-	loadedTypes [4]bool
+	loadedTypes [5]bool
 }

 // UsersOrErr returns the Users value or an error if the edge

@@ -81,6 +83,15 @@ func (e GroupEdges) LabelsOrErr() ([]*Label, error) {
 	return nil, &NotLoadedError{edge: "labels"}
 }
+
+// DocumentsOrErr returns the Documents value or an error if the edge
+// was not loaded in eager-loading.
+func (e GroupEdges) DocumentsOrErr() ([]*Document, error) {
+	if e.loadedTypes[4] {
+		return e.Documents, nil
+	}
+	return nil, &NotLoadedError{edge: "documents"}
+}

 // scanValues returns the types for scanning values from sql.Rows.
 func (*Group) scanValues(columns []string) ([]interface{}, error) {
 	values := make([]interface{}, len(columns))

@@ -162,6 +173,11 @@ func (gr *Group) QueryLabels() *LabelQuery {
 	return (&GroupClient{config: gr.config}).QueryLabels(gr)
 }

+// QueryDocuments queries the "documents" edge of the Group entity.
+func (gr *Group) QueryDocuments() *DocumentQuery {
+	return (&GroupClient{config: gr.config}).QueryDocuments(gr)
+}
+
 // Update returns a builder for updating this Group.
 // Note that you need to call Group.Unwrap() before calling this method if this Group
 // was returned from a transaction, and the transaction was committed or rolled back.
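These hunks give Group a new one-to-many documents edge with the standard accessor pair. DocumentsOrErr only succeeds when the edge was actually eager-loaded (tracked by loadedTypes[4]); otherwise it returns a *NotLoadedError instead of silently yielding an empty slice. A minimal sketch, with a hypothetical helper name and a Group value assumed to come from a query elsewhere:

package example

import (
	"fmt"

	"github.com/hay-kot/content/backend/ent"
)

// printGroupDocuments is a hypothetical helper: it distinguishes "edge never
// requested" (error) from "edge loaded but empty" (nil slice, no error).
func printGroupDocuments(gr *ent.Group) error {
	docs, err := gr.Edges.DocumentsOrErr()
	if err != nil {
		return err // edge was not requested via WithDocuments
	}
	for _, d := range docs {
		fmt.Println(d.ID)
	}
	return nil
}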
@@ -30,6 +30,8 @@ const (
 	EdgeItems = "items"
 	// EdgeLabels holds the string denoting the labels edge name in mutations.
 	EdgeLabels = "labels"
+	// EdgeDocuments holds the string denoting the documents edge name in mutations.
+	EdgeDocuments = "documents"
 	// Table holds the table name of the group in the database.
 	Table = "groups"
 	// UsersTable is the table that holds the users relation/edge.

@@ -60,6 +62,13 @@ const (
 	LabelsInverseTable = "labels"
 	// LabelsColumn is the table column denoting the labels relation/edge.
 	LabelsColumn = "group_labels"
+	// DocumentsTable is the table that holds the documents relation/edge.
+	DocumentsTable = "documents"
+	// DocumentsInverseTable is the table name for the Document entity.
+	// It exists in this package in order to avoid circular dependency with the "document" package.
+	DocumentsInverseTable = "documents"
+	// DocumentsColumn is the table column denoting the documents relation/edge.
+	DocumentsColumn = "group_documents"
 )

 // Columns holds all SQL columns for group fields.
@@ -478,6 +478,34 @@ func HasLabelsWith(preds ...predicate.Label) predicate.Group {
 	})
 }

+// HasDocuments applies the HasEdge predicate on the "documents" edge.
+func HasDocuments() predicate.Group {
+	return predicate.Group(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(DocumentsTable, FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, DocumentsTable, DocumentsColumn),
+		)
+		sqlgraph.HasNeighbors(s, step)
+	})
+}
+
+// HasDocumentsWith applies the HasEdge predicate on the "documents" edge with a given conditions (other predicates).
+func HasDocumentsWith(preds ...predicate.Document) predicate.Group {
+	return predicate.Group(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(DocumentsInverseTable, FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, DocumentsTable, DocumentsColumn),
+		)
+		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+			for _, p := range preds {
+				p(s)
+			}
+		})
+	})
+}
+
 // And groups predicates with the AND operator between them.
 func And(predicates ...predicate.Group) predicate.Group {
 	return predicate.Group(func(s *sql.Selector) {
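The two predicates above let callers filter groups by edge existence (HasDocuments) or by conditions on the related documents (HasDocumentsWith). A minimal sketch, assuming a generated client; the helper name is hypothetical, and document.ID is the standard generated ID-equality predicate rather than anything introduced in this hunk:

package example

import (
	"context"

	"github.com/google/uuid"

	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/group"
)

// groupOwningDocument is a hypothetical helper: it finds the group that owns a
// given document by combining the new edge predicate with an ID predicate.
func groupOwningDocument(ctx context.Context, client *ent.Client, docID uuid.UUID) (*ent.Group, error) {
	return client.Group.Query().
		Where(group.HasDocumentsWith(document.ID(docID))).
		Only(ctx)
}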
@@ -11,6 +11,7 @@ import (
 	"entgo.io/ent/dialect/sql/sqlgraph"
 	"entgo.io/ent/schema/field"
 	"github.com/google/uuid"
+	"github.com/hay-kot/content/backend/ent/document"
 	"github.com/hay-kot/content/backend/ent/group"
 	"github.com/hay-kot/content/backend/ent/item"
 	"github.com/hay-kot/content/backend/ent/label"

@@ -147,6 +148,21 @@ func (gc *GroupCreate) AddLabels(l ...*Label) *GroupCreate {
 	return gc.AddLabelIDs(ids...)
 }

+// AddDocumentIDs adds the "documents" edge to the Document entity by IDs.
+func (gc *GroupCreate) AddDocumentIDs(ids ...uuid.UUID) *GroupCreate {
+	gc.mutation.AddDocumentIDs(ids...)
+	return gc
+}
+
+// AddDocuments adds the "documents" edges to the Document entity.
+func (gc *GroupCreate) AddDocuments(d ...*Document) *GroupCreate {
+	ids := make([]uuid.UUID, len(d))
+	for i := range d {
+		ids[i] = d[i].ID
+	}
+	return gc.AddDocumentIDs(ids...)
+}
+
 // Mutation returns the GroupMutation object of the builder.
 func (gc *GroupCreate) Mutation() *GroupMutation {
 	return gc.mutation

@@ -410,6 +426,25 @@ func (gc *GroupCreate) createSpec() (*Group, *sqlgraph.CreateSpec) {
 		}
 		_spec.Edges = append(_spec.Edges, edge)
 	}
+	if nodes := gc.mutation.DocumentsIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges = append(_spec.Edges, edge)
+	}
 	return _node, _spec
 }
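With the create-builder additions above, documents can be attached to a group at creation time. A minimal sketch under stated assumptions: the helper name is hypothetical, and SetName presumes the Group schema has a "name" field, which is not shown in these hunks.

package example

import (
	"context"

	"github.com/hay-kot/content/backend/ent"
)

// createGroupWithDocuments is a hypothetical helper: the documents edge is
// staged via AddDocuments and written by createSpec as an O2M edge insert.
func createGroupWithDocuments(ctx context.Context, client *ent.Client, docs ...*ent.Document) (*ent.Group, error) {
	return client.Group.Create().
		SetName("demo-group"). // assumed field, see lead-in
		AddDocuments(docs...).
		Save(ctx)
}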
@@ -12,6 +12,7 @@ import (
 	"entgo.io/ent/dialect/sql/sqlgraph"
 	"entgo.io/ent/schema/field"
 	"github.com/google/uuid"
+	"github.com/hay-kot/content/backend/ent/document"
 	"github.com/hay-kot/content/backend/ent/group"
 	"github.com/hay-kot/content/backend/ent/item"
 	"github.com/hay-kot/content/backend/ent/label"

@@ -33,6 +34,7 @@ type GroupQuery struct {
 	withLocations *LocationQuery
 	withItems     *ItemQuery
 	withLabels    *LabelQuery
+	withDocuments *DocumentQuery
 	// intermediate query (i.e. traversal path).
 	sql  *sql.Selector
 	path func(context.Context) (*sql.Selector, error)

@@ -157,6 +159,28 @@ func (gq *GroupQuery) QueryLabels() *LabelQuery {
 	return query
 }

+// QueryDocuments chains the current query on the "documents" edge.
+func (gq *GroupQuery) QueryDocuments() *DocumentQuery {
+	query := &DocumentQuery{config: gq.config}
+	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+		if err := gq.prepareQuery(ctx); err != nil {
+			return nil, err
+		}
+		selector := gq.sqlQuery(ctx)
+		if err := selector.Err(); err != nil {
+			return nil, err
+		}
+		step := sqlgraph.NewStep(
+			sqlgraph.From(group.Table, group.FieldID, selector),
+			sqlgraph.To(document.Table, document.FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, group.DocumentsTable, group.DocumentsColumn),
+		)
+		fromU = sqlgraph.SetNeighbors(gq.driver.Dialect(), step)
+		return fromU, nil
+	}
+	return query
+}
+
 // First returns the first Group entity from the query.
 // Returns a *NotFoundError when no Group was found.
 func (gq *GroupQuery) First(ctx context.Context) (*Group, error) {

@@ -342,6 +366,7 @@ func (gq *GroupQuery) Clone() *GroupQuery {
 		withLocations: gq.withLocations.Clone(),
 		withItems:     gq.withItems.Clone(),
 		withLabels:    gq.withLabels.Clone(),
+		withDocuments: gq.withDocuments.Clone(),
 		// clone intermediate query.
 		sql:  gq.sql.Clone(),
 		path: gq.path,

@@ -393,6 +418,17 @@ func (gq *GroupQuery) WithLabels(opts ...func(*LabelQuery)) *GroupQuery {
 	return gq
 }

+// WithDocuments tells the query-builder to eager-load the nodes that are connected to
+// the "documents" edge. The optional arguments are used to configure the query builder of the edge.
+func (gq *GroupQuery) WithDocuments(opts ...func(*DocumentQuery)) *GroupQuery {
+	query := &DocumentQuery{config: gq.config}
+	for _, opt := range opts {
+		opt(query)
+	}
+	gq.withDocuments = query
+	return gq
+}
+
 // GroupBy is used to group vertices by one or more fields/columns.
 // It is often used with aggregate functions, like: count, max, mean, min, sum.
 //

@@ -461,11 +497,12 @@ func (gq *GroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Group,
 	var (
 		nodes       = []*Group{}
 		_spec       = gq.querySpec()
-		loadedTypes = [4]bool{
+		loadedTypes = [5]bool{
 			gq.withUsers != nil,
 			gq.withLocations != nil,
 			gq.withItems != nil,
 			gq.withLabels != nil,
+			gq.withDocuments != nil,
 		}
 	)
 	_spec.ScanValues = func(columns []string) ([]interface{}, error) {

@@ -514,6 +551,13 @@ func (gq *GroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Group,
 			return nil, err
 		}
 	}
+	if query := gq.withDocuments; query != nil {
+		if err := gq.loadDocuments(ctx, query, nodes,
+			func(n *Group) { n.Edges.Documents = []*Document{} },
+			func(n *Group, e *Document) { n.Edges.Documents = append(n.Edges.Documents, e) }); err != nil {
+			return nil, err
+		}
+	}
 	return nodes, nil
 }

@@ -641,6 +685,37 @@ func (gq *GroupQuery) loadLabels(ctx context.Context, query *LabelQuery, nodes [
 	}
 	return nil
 }
+func (gq *GroupQuery) loadDocuments(ctx context.Context, query *DocumentQuery, nodes []*Group, init func(*Group), assign func(*Group, *Document)) error {
+	fks := make([]driver.Value, 0, len(nodes))
+	nodeids := make(map[uuid.UUID]*Group)
+	for i := range nodes {
+		fks = append(fks, nodes[i].ID)
+		nodeids[nodes[i].ID] = nodes[i]
+		if init != nil {
+			init(nodes[i])
+		}
+	}
+	query.withFKs = true
+	query.Where(predicate.Document(func(s *sql.Selector) {
+		s.Where(sql.InValues(group.DocumentsColumn, fks...))
+	}))
+	neighbors, err := query.All(ctx)
+	if err != nil {
+		return err
+	}
+	for _, n := range neighbors {
+		fk := n.group_documents
+		if fk == nil {
+			return fmt.Errorf(`foreign-key "group_documents" is nil for node %v`, n.ID)
+		}
+		node, ok := nodeids[*fk]
+		if !ok {
+			return fmt.Errorf(`unexpected foreign-key "group_documents" returned %v for node %v`, *fk, n.ID)
+		}
+		assign(node, n)
+	}
+	return nil
+}

 func (gq *GroupQuery) sqlCount(ctx context.Context) (int, error) {
 	_spec := gq.querySpec()
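The query-side hunks above wire up the full eager-load path: WithDocuments marks the edge, sqlAll flips loadedTypes[4], and loadDocuments fetches all child rows in one query and buckets them by the group_documents foreign key. A minimal sketch, with a hypothetical helper name and an assumed client/ctx:

package example

import (
	"context"

	"github.com/hay-kot/content/backend/ent"
)

// groupsWithDocuments is a hypothetical helper: it loads groups and their
// documents edge in two queries total, regardless of the number of groups.
func groupsWithDocuments(ctx context.Context, client *ent.Client) ([]*ent.Group, error) {
	return client.Group.Query().
		WithDocuments(func(q *ent.DocumentQuery) {
			q.Limit(25) // optional configuration of the edge query
		}).
		All(ctx)
}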
@@ -12,6 +12,7 @@ import (
 	"entgo.io/ent/dialect/sql/sqlgraph"
 	"entgo.io/ent/schema/field"
 	"github.com/google/uuid"
+	"github.com/hay-kot/content/backend/ent/document"
 	"github.com/hay-kot/content/backend/ent/group"
 	"github.com/hay-kot/content/backend/ent/item"
 	"github.com/hay-kot/content/backend/ent/label"

@@ -119,6 +120,21 @@ func (gu *GroupUpdate) AddLabels(l ...*Label) *GroupUpdate {
 	return gu.AddLabelIDs(ids...)
 }

+// AddDocumentIDs adds the "documents" edge to the Document entity by IDs.
+func (gu *GroupUpdate) AddDocumentIDs(ids ...uuid.UUID) *GroupUpdate {
+	gu.mutation.AddDocumentIDs(ids...)
+	return gu
+}
+
+// AddDocuments adds the "documents" edges to the Document entity.
+func (gu *GroupUpdate) AddDocuments(d ...*Document) *GroupUpdate {
+	ids := make([]uuid.UUID, len(d))
+	for i := range d {
+		ids[i] = d[i].ID
+	}
+	return gu.AddDocumentIDs(ids...)
+}
+
 // Mutation returns the GroupMutation object of the builder.
 func (gu *GroupUpdate) Mutation() *GroupMutation {
 	return gu.mutation

@@ -208,6 +224,27 @@ func (gu *GroupUpdate) RemoveLabels(l ...*Label) *GroupUpdate {
 	return gu.RemoveLabelIDs(ids...)
 }

+// ClearDocuments clears all "documents" edges to the Document entity.
+func (gu *GroupUpdate) ClearDocuments() *GroupUpdate {
+	gu.mutation.ClearDocuments()
+	return gu
+}
+
+// RemoveDocumentIDs removes the "documents" edge to Document entities by IDs.
+func (gu *GroupUpdate) RemoveDocumentIDs(ids ...uuid.UUID) *GroupUpdate {
+	gu.mutation.RemoveDocumentIDs(ids...)
+	return gu
+}
+
+// RemoveDocuments removes "documents" edges to Document entities.
+func (gu *GroupUpdate) RemoveDocuments(d ...*Document) *GroupUpdate {
+	ids := make([]uuid.UUID, len(d))
+	for i := range d {
+		ids[i] = d[i].ID
+	}
+	return gu.RemoveDocumentIDs(ids...)
+}
+
 // Save executes the query and returns the number of nodes affected by the update operation.
 func (gu *GroupUpdate) Save(ctx context.Context) (int, error) {
 	var (

@@ -547,6 +584,60 @@ func (gu *GroupUpdate) sqlSave(ctx context.Context) (n int, err error) {
 		}
 		_spec.Edges.Add = append(_spec.Edges.Add, edge)
 	}
+	if gu.mutation.DocumentsCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := gu.mutation.RemovedDocumentsIDs(); len(nodes) > 0 && !gu.mutation.DocumentsCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := gu.mutation.DocumentsIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Add = append(_spec.Edges.Add, edge)
+	}
 	if n, err = sqlgraph.UpdateNodes(ctx, gu.driver, _spec); err != nil {
 		if _, ok := err.(*sqlgraph.NotFoundError); ok {
 			err = &NotFoundError{group.Label}

@@ -652,6 +743,21 @@ func (guo *GroupUpdateOne) AddLabels(l ...*Label) *GroupUpdateOne {
 	return guo.AddLabelIDs(ids...)
 }

+// AddDocumentIDs adds the "documents" edge to the Document entity by IDs.
+func (guo *GroupUpdateOne) AddDocumentIDs(ids ...uuid.UUID) *GroupUpdateOne {
+	guo.mutation.AddDocumentIDs(ids...)
+	return guo
+}
+
+// AddDocuments adds the "documents" edges to the Document entity.
+func (guo *GroupUpdateOne) AddDocuments(d ...*Document) *GroupUpdateOne {
+	ids := make([]uuid.UUID, len(d))
+	for i := range d {
+		ids[i] = d[i].ID
+	}
+	return guo.AddDocumentIDs(ids...)
+}
+
 // Mutation returns the GroupMutation object of the builder.
 func (guo *GroupUpdateOne) Mutation() *GroupMutation {
 	return guo.mutation

@@ -741,6 +847,27 @@ func (guo *GroupUpdateOne) RemoveLabels(l ...*Label) *GroupUpdateOne {
 	return guo.RemoveLabelIDs(ids...)
 }

+// ClearDocuments clears all "documents" edges to the Document entity.
+func (guo *GroupUpdateOne) ClearDocuments() *GroupUpdateOne {
+	guo.mutation.ClearDocuments()
+	return guo
+}
+
+// RemoveDocumentIDs removes the "documents" edge to Document entities by IDs.
+func (guo *GroupUpdateOne) RemoveDocumentIDs(ids ...uuid.UUID) *GroupUpdateOne {
+	guo.mutation.RemoveDocumentIDs(ids...)
+	return guo
+}
+
+// RemoveDocuments removes "documents" edges to Document entities.
+func (guo *GroupUpdateOne) RemoveDocuments(d ...*Document) *GroupUpdateOne {
+	ids := make([]uuid.UUID, len(d))
+	for i := range d {
+		ids[i] = d[i].ID
+	}
+	return guo.RemoveDocumentIDs(ids...)
+}
+
 // Select allows selecting one or more fields (columns) of the returned entity.
 // The default is selecting all fields defined in the entity schema.
 func (guo *GroupUpdateOne) Select(field string, fields ...string) *GroupUpdateOne {

@@ -1110,6 +1237,60 @@ func (guo *GroupUpdateOne) sqlSave(ctx context.Context) (_node *Group, err error
 		}
 		_spec.Edges.Add = append(_spec.Edges.Add, edge)
 	}
+	if guo.mutation.DocumentsCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := guo.mutation.RemovedDocumentsIDs(); len(nodes) > 0 && !guo.mutation.DocumentsCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := guo.mutation.DocumentsIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   group.DocumentsTable,
+			Columns: []string{group.DocumentsColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: document.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Add = append(_spec.Edges.Add, edge)
+	}
 	_node = &Group{config: guo.config}
 	_spec.Assign = _node.assignValues
 	_spec.ScanValues = _node.scanValues
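Note how sqlSave orders edge handling: Clear edges are emitted before Remove and Add, so staging a clear and an add on the same builder amounts to a full replacement of the edge set. A minimal sketch with a hypothetical helper name and assumed client/ctx inputs:

package example

import (
	"context"

	"github.com/google/uuid"

	"github.com/hay-kot/content/backend/ent"
)

// replaceGroupDocuments is a hypothetical helper: ClearDocuments is staged
// before AddDocuments, matching the Clear-then-Add ordering in sqlSave above.
func replaceGroupDocuments(ctx context.Context, client *ent.Client, groupID uuid.UUID, docs ...*ent.Document) (*ent.Group, error) {
	return client.Group.
		UpdateOneID(groupID).
		ClearDocuments().
		AddDocuments(docs...).
		Save(ctx)
}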
45
backend/ent/has_id.go
Normal file

@@ -0,0 +1,45 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import "github.com/google/uuid"
+
+func (a *Attachment) GetID() uuid.UUID {
+	return a.ID
+}
+
+func (at *AuthTokens) GetID() uuid.UUID {
+	return at.ID
+}
+
+func (d *Document) GetID() uuid.UUID {
+	return d.ID
+}
+
+func (dt *DocumentToken) GetID() uuid.UUID {
+	return dt.ID
+}
+
+func (gr *Group) GetID() uuid.UUID {
+	return gr.ID
+}
+
+func (i *Item) GetID() uuid.UUID {
+	return i.ID
+}
+
+func (_if *ItemField) GetID() uuid.UUID {
+	return _if.ID
+}
+
+func (l *Label) GetID() uuid.UUID {
+	return l.ID
+}
+
+func (l *Location) GetID() uuid.UUID {
+	return l.ID
+}
+
+func (u *User) GetID() uuid.UUID {
+	return u.ID
+}
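The new has_id.go gives every generated entity a uniform uuid accessor, so generic code can work over mixed entity values behind one interface. The interface declaration itself lives outside this file, so the sketch below declares a local equivalent; the names are illustrative.

package example

import "github.com/google/uuid"

// hasID mirrors the interface these GetID methods satisfy; the real
// declaration is elsewhere in the backend, so this name is hypothetical.
type hasID interface {
	GetID() uuid.UUID
}

// collectIDs works across any mix of generated entities (Item, Label,
// Document, ...) because each now exposes GetID.
func collectIDs(entities ...hasID) []uuid.UUID {
	ids := make([]uuid.UUID, 0, len(entities))
	for _, e := range entities {
		ids = append(ids, e.GetID())
	}
	return ids
}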
@@ -9,6 +9,19 @@ import (
 	"github.com/hay-kot/content/backend/ent"
 )

+// The AttachmentFunc type is an adapter to allow the use of ordinary
+// function as Attachment mutator.
+type AttachmentFunc func(context.Context, *ent.AttachmentMutation) (ent.Value, error)
+
+// Mutate calls f(ctx, m).
+func (f AttachmentFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
+	mv, ok := m.(*ent.AttachmentMutation)
+	if !ok {
+		return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.AttachmentMutation", m)
+	}
+	return f(ctx, mv)
+}
+
 // The AuthTokensFunc type is an adapter to allow the use of ordinary
 // function as AuthTokens mutator.
 type AuthTokensFunc func(context.Context, *ent.AuthTokensMutation) (ent.Value, error)

@@ -22,6 +35,32 @@ func (f AuthTokensFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value,
 	return f(ctx, mv)
 }

+// The DocumentFunc type is an adapter to allow the use of ordinary
+// function as Document mutator.
+type DocumentFunc func(context.Context, *ent.DocumentMutation) (ent.Value, error)
+
+// Mutate calls f(ctx, m).
+func (f DocumentFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
+	mv, ok := m.(*ent.DocumentMutation)
+	if !ok {
+		return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.DocumentMutation", m)
+	}
+	return f(ctx, mv)
+}
+
+// The DocumentTokenFunc type is an adapter to allow the use of ordinary
+// function as DocumentToken mutator.
+type DocumentTokenFunc func(context.Context, *ent.DocumentTokenMutation) (ent.Value, error)
+
+// Mutate calls f(ctx, m).
+func (f DocumentTokenFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
+	mv, ok := m.(*ent.DocumentTokenMutation)
+	if !ok {
+		return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.DocumentTokenMutation", m)
+	}
+	return f(ctx, mv)
+}
+
 // The GroupFunc type is an adapter to allow the use of ordinary
 // function as Group mutator.
 type GroupFunc func(context.Context, *ent.GroupMutation) (ent.Value, error)
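These adapters let a plain function act as an ent.Mutator for one mutation type; the type assertion inside Mutate rejects anything that is not the expected mutation. A minimal registration sketch, assuming the generated hook package and a client supplied by the caller; the helper name is hypothetical:

package example

import (
	"context"

	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/hook"
)

// registerDocumentHook is a hypothetical helper: the hook runs before every
// Document mutation and delegates to next to let the mutation proceed.
func registerDocumentHook(client *ent.Client) {
	client.Document.Use(func(next ent.Mutator) ent.Mutator {
		return hook.DocumentFunc(func(ctx context.Context, m *ent.DocumentMutation) (ent.Value, error) {
			// Inspect or adjust m here, then continue the chain.
			return next.Mutate(ctx, m)
		})
	})
}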
@@ -29,6 +29,10 @@ type Item struct {
 	Description string `json:"description,omitempty"`
 	// Notes holds the value of the "notes" field.
 	Notes string `json:"notes,omitempty"`
+	// Quantity holds the value of the "quantity" field.
+	Quantity int `json:"quantity,omitempty"`
+	// Insured holds the value of the "insured" field.
+	Insured bool `json:"insured,omitempty"`
 	// SerialNumber holds the value of the "serial_number" field.
 	SerialNumber string `json:"serial_number,omitempty"`
 	// ModelNumber holds the value of the "model_number" field.

@@ -72,9 +76,11 @@ type ItemEdges struct {
 	Fields []*ItemField `json:"fields,omitempty"`
 	// Label holds the value of the label edge.
 	Label []*Label `json:"label,omitempty"`
+	// Attachments holds the value of the attachments edge.
+	Attachments []*Attachment `json:"attachments,omitempty"`
 	// loadedTypes holds the information for reporting if a
 	// type was loaded (or requested) in eager-loading or not.
-	loadedTypes [4]bool
+	loadedTypes [5]bool
 }

 // GroupOrErr returns the Group value or an error if the edge

@@ -121,15 +127,26 @@ func (e ItemEdges) LabelOrErr() ([]*Label, error) {
 	return nil, &NotLoadedError{edge: "label"}
 }

+// AttachmentsOrErr returns the Attachments value or an error if the edge
+// was not loaded in eager-loading.
+func (e ItemEdges) AttachmentsOrErr() ([]*Attachment, error) {
+	if e.loadedTypes[4] {
+		return e.Attachments, nil
+	}
+	return nil, &NotLoadedError{edge: "attachments"}
+}
+
 // scanValues returns the types for scanning values from sql.Rows.
 func (*Item) scanValues(columns []string) ([]interface{}, error) {
 	values := make([]interface{}, len(columns))
 	for i := range columns {
 		switch columns[i] {
-		case item.FieldLifetimeWarranty:
+		case item.FieldInsured, item.FieldLifetimeWarranty:
 			values[i] = new(sql.NullBool)
 		case item.FieldPurchasePrice, item.FieldSoldPrice:
 			values[i] = new(sql.NullFloat64)
+		case item.FieldQuantity:
+			values[i] = new(sql.NullInt64)
 		case item.FieldName, item.FieldDescription, item.FieldNotes, item.FieldSerialNumber, item.FieldModelNumber, item.FieldManufacturer, item.FieldWarrantyDetails, item.FieldPurchaseFrom, item.FieldSoldTo, item.FieldSoldNotes:
 			values[i] = new(sql.NullString)
 		case item.FieldCreatedAt, item.FieldUpdatedAt, item.FieldWarrantyExpires, item.FieldPurchaseTime, item.FieldSoldTime:

@@ -191,6 +208,18 @@ func (i *Item) assignValues(columns []string, values []interface{}) error {
 			} else if value.Valid {
 				i.Notes = value.String
 			}
+		case item.FieldQuantity:
+			if value, ok := values[j].(*sql.NullInt64); !ok {
+				return fmt.Errorf("unexpected type %T for field quantity", values[j])
+			} else if value.Valid {
+				i.Quantity = int(value.Int64)
+			}
+		case item.FieldInsured:
+			if value, ok := values[j].(*sql.NullBool); !ok {
+				return fmt.Errorf("unexpected type %T for field insured", values[j])
+			} else if value.Valid {
+				i.Insured = value.Bool
+			}
 		case item.FieldSerialNumber:
 			if value, ok := values[j].(*sql.NullString); !ok {
 				return fmt.Errorf("unexpected type %T for field serial_number", values[j])

@@ -308,6 +337,11 @@ func (i *Item) QueryLabel() *LabelQuery {
 	return (&ItemClient{config: i.config}).QueryLabel(i)
 }

+// QueryAttachments queries the "attachments" edge of the Item entity.
+func (i *Item) QueryAttachments() *AttachmentQuery {
+	return (&ItemClient{config: i.config}).QueryAttachments(i)
+}
+
 // Update returns a builder for updating this Item.
 // Note that you need to call Item.Unwrap() before calling this method if this Item
 // was returned from a transaction, and the transaction was committed or rolled back.

@@ -346,6 +380,12 @@ func (i *Item) String() string {
 	builder.WriteString("notes=")
 	builder.WriteString(i.Notes)
 	builder.WriteString(", ")
+	builder.WriteString("quantity=")
+	builder.WriteString(fmt.Sprintf("%v", i.Quantity))
+	builder.WriteString(", ")
+	builder.WriteString("insured=")
+	builder.WriteString(fmt.Sprintf("%v", i.Insured))
+	builder.WriteString(", ")
 	builder.WriteString("serial_number=")
 	builder.WriteString(i.SerialNumber)
 	builder.WriteString(", ")
@@ -23,6 +23,10 @@ const (
 	FieldDescription = "description"
 	// FieldNotes holds the string denoting the notes field in the database.
 	FieldNotes = "notes"
+	// FieldQuantity holds the string denoting the quantity field in the database.
+	FieldQuantity = "quantity"
+	// FieldInsured holds the string denoting the insured field in the database.
+	FieldInsured = "insured"
 	// FieldSerialNumber holds the string denoting the serial_number field in the database.
 	FieldSerialNumber = "serial_number"
 	// FieldModelNumber holds the string denoting the model_number field in the database.

@@ -57,6 +61,8 @@ const (
 	EdgeFields = "fields"
 	// EdgeLabel holds the string denoting the label edge name in mutations.
 	EdgeLabel = "label"
+	// EdgeAttachments holds the string denoting the attachments edge name in mutations.
+	EdgeAttachments = "attachments"
 	// Table holds the table name of the item in the database.
 	Table = "items"
 	// GroupTable is the table that holds the group relation/edge.

@@ -85,6 +91,13 @@ const (
 	// LabelInverseTable is the table name for the Label entity.
 	// It exists in this package in order to avoid circular dependency with the "label" package.
 	LabelInverseTable = "labels"
+	// AttachmentsTable is the table that holds the attachments relation/edge.
+	AttachmentsTable = "attachments"
+	// AttachmentsInverseTable is the table name for the Attachment entity.
+	// It exists in this package in order to avoid circular dependency with the "attachment" package.
+	AttachmentsInverseTable = "attachments"
+	// AttachmentsColumn is the table column denoting the attachments relation/edge.
+	AttachmentsColumn = "item_attachments"
 )

 // Columns holds all SQL columns for item fields.

@@ -95,6 +108,8 @@ var Columns = []string{
 	FieldName,
 	FieldDescription,
 	FieldNotes,
+	FieldQuantity,
+	FieldInsured,
 	FieldSerialNumber,
 	FieldModelNumber,
 	FieldManufacturer,

@@ -151,6 +166,10 @@ var (
 	DescriptionValidator func(string) error
 	// NotesValidator is a validator for the "notes" field. It is called by the builders before save.
 	NotesValidator func(string) error
+	// DefaultQuantity holds the default value on creation for the "quantity" field.
+	DefaultQuantity int
+	// DefaultInsured holds the default value on creation for the "insured" field.
+	DefaultInsured bool
 	// SerialNumberValidator is a validator for the "serial_number" field. It is called by the builders before save.
 	SerialNumberValidator func(string) error
 	// ModelNumberValidator is a validator for the "model_number" field. It is called by the builders before save.
@@ -117,6 +117,20 @@ func Notes(v string) predicate.Item {
 	})
 }

+// Quantity applies equality check predicate on the "quantity" field. It's identical to QuantityEQ.
+func Quantity(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldQuantity), v))
+	})
+}
+
+// Insured applies equality check predicate on the "insured" field. It's identical to InsuredEQ.
+func Insured(v bool) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldInsured), v))
+	})
+}
+
 // SerialNumber applies equality check predicate on the "serial_number" field. It's identical to SerialNumberEQ.
 func SerialNumber(v string) predicate.Item {
 	return predicate.Item(func(s *sql.Selector) {

@@ -661,6 +675,84 @@ func NotesContainsFold(v string) predicate.Item {
 	})
 }

+// QuantityEQ applies the EQ predicate on the "quantity" field.
+func QuantityEQ(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldQuantity), v))
+	})
+}
+
+// QuantityNEQ applies the NEQ predicate on the "quantity" field.
+func QuantityNEQ(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldQuantity), v))
+	})
+}
+
+// QuantityIn applies the In predicate on the "quantity" field.
+func QuantityIn(vs ...int) predicate.Item {
+	v := make([]interface{}, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldQuantity), v...))
+	})
+}
+
+// QuantityNotIn applies the NotIn predicate on the "quantity" field.
+func QuantityNotIn(vs ...int) predicate.Item {
+	v := make([]interface{}, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldQuantity), v...))
+	})
+}
+
+// QuantityGT applies the GT predicate on the "quantity" field.
+func QuantityGT(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldQuantity), v))
+	})
+}
+
+// QuantityGTE applies the GTE predicate on the "quantity" field.
+func QuantityGTE(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldQuantity), v))
+	})
+}
+
+// QuantityLT applies the LT predicate on the "quantity" field.
+func QuantityLT(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldQuantity), v))
+	})
+}
+
+// QuantityLTE applies the LTE predicate on the "quantity" field.
+func QuantityLTE(v int) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldQuantity), v))
+	})
+}
+
+// InsuredEQ applies the EQ predicate on the "insured" field.
+func InsuredEQ(v bool) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldInsured), v))
+	})
+}
+
+// InsuredNEQ applies the NEQ predicate on the "insured" field.
+func InsuredNEQ(v bool) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldInsured), v))
+	})
+}
+
 // SerialNumberEQ applies the EQ predicate on the "serial_number" field.
 func SerialNumberEQ(v string) predicate.Item {
 	return predicate.Item(func(s *sql.Selector) {

@@ -1940,6 +2032,34 @@ func HasLabelWith(preds ...predicate.Label) predicate.Item {
 	})
 }

+// HasAttachments applies the HasEdge predicate on the "attachments" edge.
+func HasAttachments() predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(AttachmentsTable, FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, AttachmentsTable, AttachmentsColumn),
+		)
+		sqlgraph.HasNeighbors(s, step)
+	})
+}
+
+// HasAttachmentsWith applies the HasEdge predicate on the "attachments" edge with a given conditions (other predicates).
+func HasAttachmentsWith(preds ...predicate.Attachment) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(AttachmentsInverseTable, FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, AttachmentsTable, AttachmentsColumn),
+		)
+		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+			for _, p := range preds {
+				p(s)
+			}
+		})
+	})
+}
+
 // And groups predicates with the AND operator between them.
 func And(predicates ...predicate.Item) predicate.Item {
 	return predicate.Item(func(s *sql.Selector) {
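The new quantity and insured fields come with the full generated predicate set, which composes with any other item predicates in a Where clause. A minimal sketch, with a hypothetical helper name and assumed client/ctx inputs:

package example

import (
	"context"

	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/item"
)

// insuredLowStock is a hypothetical helper combining the new field predicates:
// insured items whose quantity has fallen below a threshold.
func insuredLowStock(ctx context.Context, client *ent.Client, threshold int) ([]*ent.Item, error) {
	return client.Item.Query().
		Where(
			item.Insured(true),
			item.QuantityLT(threshold),
		).
		All(ctx)
}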
backend/ent/item_create.go

@@ -11,6 +11,7 @@ import (
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/ent/item"
	"github.com/hay-kot/content/backend/ent/itemfield"

@@ -87,6 +88,34 @@ func (ic *ItemCreate) SetNillableNotes(s *string) *ItemCreate {
	return ic
}

// SetQuantity sets the "quantity" field.
func (ic *ItemCreate) SetQuantity(i int) *ItemCreate {
	ic.mutation.SetQuantity(i)
	return ic
}

// SetNillableQuantity sets the "quantity" field if the given value is not nil.
func (ic *ItemCreate) SetNillableQuantity(i *int) *ItemCreate {
	if i != nil {
		ic.SetQuantity(*i)
	}
	return ic
}

// SetInsured sets the "insured" field.
func (ic *ItemCreate) SetInsured(b bool) *ItemCreate {
	ic.mutation.SetInsured(b)
	return ic
}

// SetNillableInsured sets the "insured" field if the given value is not nil.
func (ic *ItemCreate) SetNillableInsured(b *bool) *ItemCreate {
	if b != nil {
		ic.SetInsured(*b)
	}
	return ic
}

// SetSerialNumber sets the "serial_number" field.
func (ic *ItemCreate) SetSerialNumber(s string) *ItemCreate {
	ic.mutation.SetSerialNumber(s)

@@ -343,6 +372,21 @@ func (ic *ItemCreate) AddLabel(l ...*Label) *ItemCreate {
	return ic.AddLabelIDs(ids...)
}

// AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
func (ic *ItemCreate) AddAttachmentIDs(ids ...uuid.UUID) *ItemCreate {
	ic.mutation.AddAttachmentIDs(ids...)
	return ic
}

// AddAttachments adds the "attachments" edges to the Attachment entity.
func (ic *ItemCreate) AddAttachments(a ...*Attachment) *ItemCreate {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return ic.AddAttachmentIDs(ids...)
}

// Mutation returns the ItemMutation object of the builder.
func (ic *ItemCreate) Mutation() *ItemMutation {
	return ic.mutation

@@ -428,6 +472,14 @@ func (ic *ItemCreate) defaults() {
		v := item.DefaultUpdatedAt()
		ic.mutation.SetUpdatedAt(v)
	}
	if _, ok := ic.mutation.Quantity(); !ok {
		v := item.DefaultQuantity
		ic.mutation.SetQuantity(v)
	}
	if _, ok := ic.mutation.Insured(); !ok {
		v := item.DefaultInsured
		ic.mutation.SetInsured(v)
	}
	if _, ok := ic.mutation.LifetimeWarranty(); !ok {
		v := item.DefaultLifetimeWarranty
		ic.mutation.SetLifetimeWarranty(v)

@@ -472,6 +524,12 @@ func (ic *ItemCreate) check() error {
			return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
		}
	}
	if _, ok := ic.mutation.Quantity(); !ok {
		return &ValidationError{Name: "quantity", err: errors.New(`ent: missing required field "Item.quantity"`)}
	}
	if _, ok := ic.mutation.Insured(); !ok {
		return &ValidationError{Name: "insured", err: errors.New(`ent: missing required field "Item.insured"`)}
	}
	if v, ok := ic.mutation.SerialNumber(); ok {
		if err := item.SerialNumberValidator(v); err != nil {
			return &ValidationError{Name: "serial_number", err: fmt.Errorf(`ent: validator failed for field "Item.serial_number": %w`, err)}

@@ -585,6 +643,22 @@ func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
		})
		_node.Notes = value
	}
	if value, ok := ic.mutation.Quantity(); ok {
		_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: item.FieldQuantity,
		})
		_node.Quantity = value
	}
	if value, ok := ic.mutation.Insured(); ok {
		_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
			Type:   field.TypeBool,
			Value:  value,
			Column: item.FieldInsured,
		})
		_node.Insured = value
	}
	if value, ok := ic.mutation.SerialNumber(); ok {
		_spec.Fields = append(_spec.Fields, &sqlgraph.FieldSpec{
			Type: field.TypeString,

@@ -767,6 +841,25 @@ func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
		}
		_spec.Edges = append(_spec.Edges, edge)
	}
	if nodes := ic.mutation.AttachmentsIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges = append(_spec.Edges, edge)
	}
	return _node, _spec
}
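A usage sketch of the new ItemCreate methods, again illustrative rather than part of the commit. "client" and "ctx" are assumed as above, "att" is a previously created *ent.Attachment, and "SetName" is assumed from the items table's name column; required edges such as the item's group are elided for brevity:

// Illustrative only: create an item with the new quantity/insured fields
// and an attachment edge in one builder chain.
itm, err := client.Item.
	Create().
	SetName("Cordless Drill").
	SetQuantity(2).
	SetInsured(true).
	AddAttachments(att).
	Save(ctx)

Note that because quantity and insured carry schema defaults, callers that omit SetQuantity/SetInsured still pass the new required-field checks in check(): defaults() fills them in first.
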
backend/ent/item_query.go

@@ -12,6 +12,7 @@ import (
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/ent/item"
	"github.com/hay-kot/content/backend/ent/itemfield"

@@ -23,17 +24,18 @@ import (
// ItemQuery is the builder for querying Item entities.
type ItemQuery struct {
	config
	limit           *int
	offset          *int
	unique          *bool
	order           []OrderFunc
	fields          []string
	predicates      []predicate.Item
	withGroup       *GroupQuery
	withLocation    *LocationQuery
	withFields      *ItemFieldQuery
	withLabel       *LabelQuery
	withAttachments *AttachmentQuery
	withFKs         bool
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)

@@ -158,6 +160,28 @@ func (iq *ItemQuery) QueryLabel() *LabelQuery {
	return query
}

// QueryAttachments chains the current query on the "attachments" edge.
func (iq *ItemQuery) QueryAttachments() *AttachmentQuery {
	query := &AttachmentQuery{config: iq.config}
	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
		if err := iq.prepareQuery(ctx); err != nil {
			return nil, err
		}
		selector := iq.sqlQuery(ctx)
		if err := selector.Err(); err != nil {
			return nil, err
		}
		step := sqlgraph.NewStep(
			sqlgraph.From(item.Table, item.FieldID, selector),
			sqlgraph.To(attachment.Table, attachment.FieldID),
			sqlgraph.Edge(sqlgraph.O2M, false, item.AttachmentsTable, item.AttachmentsColumn),
		)
		fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step)
		return fromU, nil
	}
	return query
}

// First returns the first Item entity from the query.
// Returns a *NotFoundError when no Item was found.
func (iq *ItemQuery) First(ctx context.Context) (*Item, error) {

@@ -334,15 +358,16 @@ func (iq *ItemQuery) Clone() *ItemQuery {
		return nil
	}
	return &ItemQuery{
		config:          iq.config,
		limit:           iq.limit,
		offset:          iq.offset,
		order:           append([]OrderFunc{}, iq.order...),
		predicates:      append([]predicate.Item{}, iq.predicates...),
		withGroup:       iq.withGroup.Clone(),
		withLocation:    iq.withLocation.Clone(),
		withFields:      iq.withFields.Clone(),
		withLabel:       iq.withLabel.Clone(),
		withAttachments: iq.withAttachments.Clone(),
		// clone intermediate query.
		sql:  iq.sql.Clone(),
		path: iq.path,

@@ -394,6 +419,17 @@ func (iq *ItemQuery) WithLabel(opts ...func(*LabelQuery)) *ItemQuery {
	return iq
}

// WithAttachments tells the query-builder to eager-load the nodes that are connected to
// the "attachments" edge. The optional arguments are used to configure the query builder of the edge.
func (iq *ItemQuery) WithAttachments(opts ...func(*AttachmentQuery)) *ItemQuery {
	query := &AttachmentQuery{config: iq.config}
	for _, opt := range opts {
		opt(query)
	}
	iq.withAttachments = query
	return iq
}

// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//

@@ -463,11 +499,12 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
		nodes   = []*Item{}
		withFKs = iq.withFKs
		_spec   = iq.querySpec()
-		loadedTypes = [4]bool{
+		loadedTypes = [5]bool{
			iq.withGroup != nil,
			iq.withLocation != nil,
			iq.withFields != nil,
			iq.withLabel != nil,
			iq.withAttachments != nil,
		}
	)
	if iq.withGroup != nil || iq.withLocation != nil {

@@ -520,6 +557,13 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
			return nil, err
		}
	}
	if query := iq.withAttachments; query != nil {
		if err := iq.loadAttachments(ctx, query, nodes,
			func(n *Item) { n.Edges.Attachments = []*Attachment{} },
			func(n *Item, e *Attachment) { n.Edges.Attachments = append(n.Edges.Attachments, e) }); err != nil {
			return nil, err
		}
	}
	return nodes, nil
}

@@ -670,6 +714,37 @@ func (iq *ItemQuery) loadLabel(ctx context.Context, query *LabelQuery, nodes []*
	}
	return nil
}

func (iq *ItemQuery) loadAttachments(ctx context.Context, query *AttachmentQuery, nodes []*Item, init func(*Item), assign func(*Item, *Attachment)) error {
	fks := make([]driver.Value, 0, len(nodes))
	nodeids := make(map[uuid.UUID]*Item)
	for i := range nodes {
		fks = append(fks, nodes[i].ID)
		nodeids[nodes[i].ID] = nodes[i]
		if init != nil {
			init(nodes[i])
		}
	}
	query.withFKs = true
	query.Where(predicate.Attachment(func(s *sql.Selector) {
		s.Where(sql.InValues(item.AttachmentsColumn, fks...))
	}))
	neighbors, err := query.All(ctx)
	if err != nil {
		return err
	}
	for _, n := range neighbors {
		fk := n.item_attachments
		if fk == nil {
			return fmt.Errorf(`foreign-key "item_attachments" is nil for node %v`, n.ID)
		}
		node, ok := nodeids[*fk]
		if !ok {
			return fmt.Errorf(`unexpected foreign-key "item_attachments" returned %v for node %v`, *fk, n.ID)
		}
		assign(node, n)
	}
	return nil
}

func (iq *ItemQuery) sqlCount(ctx context.Context) (int, error) {
	_spec := iq.querySpec()
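Eager loading works the same way as for the existing edges: WithAttachments registers the edge query, sqlAll batches the child fetch through loadAttachments, and results land on Edges.Attachments. A minimal sketch, with "client" and "ctx" assumed as before:

// Illustrative only: load items together with their manual attachments.
items, err := client.Item.Query().
	WithAttachments(func(q *ent.AttachmentQuery) {
		q.Where(attachment.TypeEQ(attachment.TypeManual))
	}).
	All(ctx)
if err == nil && len(items) > 0 {
	manuals := items[0].Edges.Attachments // populated by loadAttachments above
	_ = manuals
}
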
backend/ent/item_update.go

@@ -12,6 +12,7 @@ import (
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/ent/item"
	"github.com/hay-kot/content/backend/ent/itemfield"

@@ -85,6 +86,41 @@ func (iu *ItemUpdate) ClearNotes() *ItemUpdate {
	return iu
}

// SetQuantity sets the "quantity" field.
func (iu *ItemUpdate) SetQuantity(i int) *ItemUpdate {
	iu.mutation.ResetQuantity()
	iu.mutation.SetQuantity(i)
	return iu
}

// SetNillableQuantity sets the "quantity" field if the given value is not nil.
func (iu *ItemUpdate) SetNillableQuantity(i *int) *ItemUpdate {
	if i != nil {
		iu.SetQuantity(*i)
	}
	return iu
}

// AddQuantity adds i to the "quantity" field.
func (iu *ItemUpdate) AddQuantity(i int) *ItemUpdate {
	iu.mutation.AddQuantity(i)
	return iu
}

// SetInsured sets the "insured" field.
func (iu *ItemUpdate) SetInsured(b bool) *ItemUpdate {
	iu.mutation.SetInsured(b)
	return iu
}

// SetNillableInsured sets the "insured" field if the given value is not nil.
func (iu *ItemUpdate) SetNillableInsured(b *bool) *ItemUpdate {
	if b != nil {
		iu.SetInsured(*b)
	}
	return iu
}

// SetSerialNumber sets the "serial_number" field.
func (iu *ItemUpdate) SetSerialNumber(s string) *ItemUpdate {
	iu.mutation.SetSerialNumber(s)

@@ -401,6 +437,21 @@ func (iu *ItemUpdate) AddLabel(l ...*Label) *ItemUpdate {
	return iu.AddLabelIDs(ids...)
}

// AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
func (iu *ItemUpdate) AddAttachmentIDs(ids ...uuid.UUID) *ItemUpdate {
	iu.mutation.AddAttachmentIDs(ids...)
	return iu
}

// AddAttachments adds the "attachments" edges to the Attachment entity.
func (iu *ItemUpdate) AddAttachments(a ...*Attachment) *ItemUpdate {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return iu.AddAttachmentIDs(ids...)
}

// Mutation returns the ItemMutation object of the builder.
func (iu *ItemUpdate) Mutation() *ItemMutation {
	return iu.mutation

@@ -460,6 +511,27 @@ func (iu *ItemUpdate) RemoveLabel(l ...*Label) *ItemUpdate {
	return iu.RemoveLabelIDs(ids...)
}

// ClearAttachments clears all "attachments" edges to the Attachment entity.
func (iu *ItemUpdate) ClearAttachments() *ItemUpdate {
	iu.mutation.ClearAttachments()
	return iu
}

// RemoveAttachmentIDs removes the "attachments" edge to Attachment entities by IDs.
func (iu *ItemUpdate) RemoveAttachmentIDs(ids ...uuid.UUID) *ItemUpdate {
	iu.mutation.RemoveAttachmentIDs(ids...)
	return iu
}

// RemoveAttachments removes "attachments" edges to Attachment entities.
func (iu *ItemUpdate) RemoveAttachments(a ...*Attachment) *ItemUpdate {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return iu.RemoveAttachmentIDs(ids...)
}

// Save executes the query and returns the number of nodes affected by the update operation.
func (iu *ItemUpdate) Save(ctx context.Context) (int, error) {
	var (

@@ -635,6 +707,27 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
			Column: item.FieldNotes,
		})
	}
	if value, ok := iu.mutation.Quantity(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: item.FieldQuantity,
		})
	}
	if value, ok := iu.mutation.AddedQuantity(); ok {
		_spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: item.FieldQuantity,
		})
	}
	if value, ok := iu.mutation.Insured(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeBool,
			Value:  value,
			Column: item.FieldInsured,
		})
	}
	if value, ok := iu.mutation.SerialNumber(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type: field.TypeString,

@@ -978,6 +1071,60 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if iu.mutation.AttachmentsCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := iu.mutation.RemovedAttachmentsIDs(); len(nodes) > 0 && !iu.mutation.AttachmentsCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := iu.mutation.AttachmentsIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if n, err = sqlgraph.UpdateNodes(ctx, iu.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{item.Label}

@@ -1049,6 +1196,41 @@ func (iuo *ItemUpdateOne) ClearNotes() *ItemUpdateOne {
	return iuo
}

// SetQuantity sets the "quantity" field.
func (iuo *ItemUpdateOne) SetQuantity(i int) *ItemUpdateOne {
	iuo.mutation.ResetQuantity()
	iuo.mutation.SetQuantity(i)
	return iuo
}

// SetNillableQuantity sets the "quantity" field if the given value is not nil.
func (iuo *ItemUpdateOne) SetNillableQuantity(i *int) *ItemUpdateOne {
	if i != nil {
		iuo.SetQuantity(*i)
	}
	return iuo
}

// AddQuantity adds i to the "quantity" field.
func (iuo *ItemUpdateOne) AddQuantity(i int) *ItemUpdateOne {
	iuo.mutation.AddQuantity(i)
	return iuo
}

// SetInsured sets the "insured" field.
func (iuo *ItemUpdateOne) SetInsured(b bool) *ItemUpdateOne {
	iuo.mutation.SetInsured(b)
	return iuo
}

// SetNillableInsured sets the "insured" field if the given value is not nil.
func (iuo *ItemUpdateOne) SetNillableInsured(b *bool) *ItemUpdateOne {
	if b != nil {
		iuo.SetInsured(*b)
	}
	return iuo
}

// SetSerialNumber sets the "serial_number" field.
func (iuo *ItemUpdateOne) SetSerialNumber(s string) *ItemUpdateOne {
	iuo.mutation.SetSerialNumber(s)

@@ -1365,6 +1547,21 @@ func (iuo *ItemUpdateOne) AddLabel(l ...*Label) *ItemUpdateOne {
	return iuo.AddLabelIDs(ids...)
}

// AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
func (iuo *ItemUpdateOne) AddAttachmentIDs(ids ...uuid.UUID) *ItemUpdateOne {
	iuo.mutation.AddAttachmentIDs(ids...)
	return iuo
}

// AddAttachments adds the "attachments" edges to the Attachment entity.
func (iuo *ItemUpdateOne) AddAttachments(a ...*Attachment) *ItemUpdateOne {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return iuo.AddAttachmentIDs(ids...)
}

// Mutation returns the ItemMutation object of the builder.
func (iuo *ItemUpdateOne) Mutation() *ItemMutation {
	return iuo.mutation

@@ -1424,6 +1621,27 @@ func (iuo *ItemUpdateOne) RemoveLabel(l ...*Label) *ItemUpdateOne {
	return iuo.RemoveLabelIDs(ids...)
}

// ClearAttachments clears all "attachments" edges to the Attachment entity.
func (iuo *ItemUpdateOne) ClearAttachments() *ItemUpdateOne {
	iuo.mutation.ClearAttachments()
	return iuo
}

// RemoveAttachmentIDs removes the "attachments" edge to Attachment entities by IDs.
func (iuo *ItemUpdateOne) RemoveAttachmentIDs(ids ...uuid.UUID) *ItemUpdateOne {
	iuo.mutation.RemoveAttachmentIDs(ids...)
	return iuo
}

// RemoveAttachments removes "attachments" edges to Attachment entities.
func (iuo *ItemUpdateOne) RemoveAttachments(a ...*Attachment) *ItemUpdateOne {
	ids := make([]uuid.UUID, len(a))
	for i := range a {
		ids[i] = a[i].ID
	}
	return iuo.RemoveAttachmentIDs(ids...)
}

// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (iuo *ItemUpdateOne) Select(field string, fields ...string) *ItemUpdateOne {

@@ -1629,6 +1847,27 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
			Column: item.FieldNotes,
		})
	}
	if value, ok := iuo.mutation.Quantity(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: item.FieldQuantity,
		})
	}
	if value, ok := iuo.mutation.AddedQuantity(); ok {
		_spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{
			Type:   field.TypeInt,
			Value:  value,
			Column: item.FieldQuantity,
		})
	}
	if value, ok := iuo.mutation.Insured(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type:   field.TypeBool,
			Value:  value,
			Column: item.FieldInsured,
		})
	}
	if value, ok := iuo.mutation.SerialNumber(); ok {
		_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
			Type: field.TypeString,

@@ -1972,6 +2211,60 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	if iuo.mutation.AttachmentsCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := iuo.mutation.RemovedAttachmentsIDs(); len(nodes) > 0 && !iuo.mutation.AttachmentsCleared() {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
	}
	if nodes := iuo.mutation.AttachmentsIDs(); len(nodes) > 0 {
		edge := &sqlgraph.EdgeSpec{
			Rel:     sqlgraph.O2M,
			Inverse: false,
			Table:   item.AttachmentsTable,
			Columns: []string{item.AttachmentsColumn},
			Bidi:    false,
			Target: &sqlgraph.EdgeTarget{
				IDSpec: &sqlgraph.FieldSpec{
					Type:   field.TypeUUID,
					Column: attachment.FieldID,
				},
			},
		}
		for _, k := range nodes {
			edge.Target.Nodes = append(edge.Target.Nodes, k)
		}
		_spec.Edges.Add = append(_spec.Edges.Add, edge)
	}
	_node = &Item{config: iuo.config}
	_spec.Assign = _node.assignValues
	_spec.ScanValues = _node.scanValues
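The update builders gain the same field and edge methods, plus an atomic AddQuantity that maps to the Fields.Add spec above. A usage sketch, with "client", "ctx", and an item UUID "id" assumed:

// Illustrative only: decrement stock, mark uninsured, and drop all
// attachment edges in a single UpdateOne call.
updated, err := client.Item.
	UpdateOneID(id).
	AddQuantity(-1). // adds to the existing column value rather than overwriting it
	SetInsured(false).
	ClearAttachments().
	Save(ctx)
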
backend/ent/migrate/schema.go

@@ -8,6 +8,35 @@ import (
)

var (
	// AttachmentsColumns holds the columns for the "attachments" table.
	AttachmentsColumns = []*schema.Column{
		{Name: "id", Type: field.TypeUUID},
		{Name: "created_at", Type: field.TypeTime},
		{Name: "updated_at", Type: field.TypeTime},
		{Name: "type", Type: field.TypeEnum, Enums: []string{"photo", "manual", "warranty", "attachment"}, Default: "attachment"},
		{Name: "document_attachments", Type: field.TypeUUID},
		{Name: "item_attachments", Type: field.TypeUUID},
	}
	// AttachmentsTable holds the schema information for the "attachments" table.
	AttachmentsTable = &schema.Table{
		Name:       "attachments",
		Columns:    AttachmentsColumns,
		PrimaryKey: []*schema.Column{AttachmentsColumns[0]},
		ForeignKeys: []*schema.ForeignKey{
			{
				Symbol:     "attachments_documents_attachments",
				Columns:    []*schema.Column{AttachmentsColumns[4]},
				RefColumns: []*schema.Column{DocumentsColumns[0]},
				OnDelete:   schema.Cascade,
			},
			{
				Symbol:     "attachments_items_attachments",
				Columns:    []*schema.Column{AttachmentsColumns[5]},
				RefColumns: []*schema.Column{ItemsColumns[0]},
				OnDelete:   schema.Cascade,
			},
		},
	}
	// AuthTokensColumns holds the columns for the "auth_tokens" table.
	AuthTokensColumns = []*schema.Column{
		{Name: "id", Type: field.TypeUUID},

@@ -38,6 +67,60 @@ var (
			},
		},
	}
	// DocumentsColumns holds the columns for the "documents" table.
	DocumentsColumns = []*schema.Column{
		{Name: "id", Type: field.TypeUUID},
		{Name: "created_at", Type: field.TypeTime},
		{Name: "updated_at", Type: field.TypeTime},
		{Name: "title", Type: field.TypeString, Size: 255},
		{Name: "path", Type: field.TypeString, Size: 500},
		{Name: "group_documents", Type: field.TypeUUID},
	}
	// DocumentsTable holds the schema information for the "documents" table.
	DocumentsTable = &schema.Table{
		Name:       "documents",
		Columns:    DocumentsColumns,
		PrimaryKey: []*schema.Column{DocumentsColumns[0]},
		ForeignKeys: []*schema.ForeignKey{
			{
				Symbol:     "documents_groups_documents",
				Columns:    []*schema.Column{DocumentsColumns[5]},
				RefColumns: []*schema.Column{GroupsColumns[0]},
				OnDelete:   schema.Cascade,
			},
		},
	}
	// DocumentTokensColumns holds the columns for the "document_tokens" table.
	DocumentTokensColumns = []*schema.Column{
		{Name: "id", Type: field.TypeUUID},
		{Name: "created_at", Type: field.TypeTime},
		{Name: "updated_at", Type: field.TypeTime},
		{Name: "token", Type: field.TypeBytes, Unique: true},
		{Name: "uses", Type: field.TypeInt, Default: 1},
		{Name: "expires_at", Type: field.TypeTime},
		{Name: "document_document_tokens", Type: field.TypeUUID, Nullable: true},
	}
	// DocumentTokensTable holds the schema information for the "document_tokens" table.
	DocumentTokensTable = &schema.Table{
		Name:       "document_tokens",
		Columns:    DocumentTokensColumns,
		PrimaryKey: []*schema.Column{DocumentTokensColumns[0]},
		ForeignKeys: []*schema.ForeignKey{
			{
				Symbol:     "document_tokens_documents_document_tokens",
				Columns:    []*schema.Column{DocumentTokensColumns[6]},
				RefColumns: []*schema.Column{DocumentsColumns[0]},
				OnDelete:   schema.Cascade,
			},
		},
		Indexes: []*schema.Index{
			{
				Name:    "documenttoken_token",
				Unique:  false,
				Columns: []*schema.Column{DocumentTokensColumns[3]},
			},
		},
	}
	// GroupsColumns holds the columns for the "groups" table.
	GroupsColumns = []*schema.Column{
		{Name: "id", Type: field.TypeUUID},

@@ -60,6 +143,8 @@ var (
		{Name: "name", Type: field.TypeString, Size: 255},
		{Name: "description", Type: field.TypeString, Nullable: true, Size: 1000},
		{Name: "notes", Type: field.TypeString, Nullable: true, Size: 1000},
		{Name: "quantity", Type: field.TypeInt, Default: 1},
		{Name: "insured", Type: field.TypeBool, Default: false},
		{Name: "serial_number", Type: field.TypeString, Nullable: true, Size: 255},
		{Name: "model_number", Type: field.TypeString, Nullable: true, Size: 255},
		{Name: "manufacturer", Type: field.TypeString, Nullable: true, Size: 255},

@@ -84,13 +169,13 @@ var (
		ForeignKeys: []*schema.ForeignKey{
			{
				Symbol:     "items_groups_items",
-				Columns:    []*schema.Column{ItemsColumns[19]},
+				Columns:    []*schema.Column{ItemsColumns[21]},
				RefColumns: []*schema.Column{GroupsColumns[0]},
				OnDelete:   schema.Cascade,
			},
			{
				Symbol:     "items_locations_items",
-				Columns:    []*schema.Column{ItemsColumns[20]},
+				Columns:    []*schema.Column{ItemsColumns[22]},
				RefColumns: []*schema.Column{LocationsColumns[0]},
				OnDelete:   schema.SetNull,
			},

@@ -104,17 +189,17 @@ var (
			{
				Name:    "item_manufacturer",
				Unique:  false,
-				Columns: []*schema.Column{ItemsColumns[8]},
+				Columns: []*schema.Column{ItemsColumns[10]},
			},
			{
				Name:    "item_model_number",
				Unique:  false,
-				Columns: []*schema.Column{ItemsColumns[7]},
+				Columns: []*schema.Column{ItemsColumns[9]},
			},
			{
				Name:    "item_serial_number",
				Unique:  false,
-				Columns: []*schema.Column{ItemsColumns[6]},
+				Columns: []*schema.Column{ItemsColumns[8]},
			},
		},
	}

@@ -245,7 +330,10 @@ var (
	}
	// Tables holds all the tables in the schema.
	Tables = []*schema.Table{
		AttachmentsTable,
		AuthTokensTable,
		DocumentsTable,
		DocumentTokensTable,
		GroupsTable,
		ItemsTable,
		ItemFieldsTable,

@@ -257,7 +345,11 @@ var (
)

func init() {
	AttachmentsTable.ForeignKeys[0].RefTable = DocumentsTable
	AttachmentsTable.ForeignKeys[1].RefTable = ItemsTable
	AuthTokensTable.ForeignKeys[0].RefTable = UsersTable
	DocumentsTable.ForeignKeys[0].RefTable = GroupsTable
	DocumentTokensTable.ForeignKeys[0].RefTable = DocumentsTable
	ItemsTable.ForeignKeys[0].RefTable = GroupsTable
	ItemsTable.ForeignKeys[1].RefTable = LocationsTable
	ItemFieldsTable.ForeignKeys[0].RefTable = ItemsTable
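Note the two new item columns shift the positional ItemsColumns indices, which is why the foreign-key and index references above move by two. Applying all of this requires nothing beyond ent's standard auto-migration; a minimal sketch, assuming a connected *ent.Client named "client" and the standard library log package:

// Illustrative only: create/upgrade the tables defined above.
if err := client.Schema.Create(ctx); err != nil {
	log.Fatalf("failed creating schema resources: %v", err)
}
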
(File diff suppressed because it is too large.)
backend/ent/predicate/predicate.go

@@ -6,9 +6,18 @@ import (
	"entgo.io/ent/dialect/sql"
)

// Attachment is the predicate function for attachment builders.
type Attachment func(*sql.Selector)

// AuthTokens is the predicate function for authtokens builders.
type AuthTokens func(*sql.Selector)

// Document is the predicate function for document builders.
type Document func(*sql.Selector)

// DocumentToken is the predicate function for documenttoken builders.
type DocumentToken func(*sql.Selector)

// Group is the predicate function for group builders.
type Group func(*sql.Selector)
backend/ent/runtime.go

@@ -6,7 +6,10 @@ import (
	"time"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/hay-kot/content/backend/ent/authtokens"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/ent/item"
	"github.com/hay-kot/content/backend/ent/itemfield"

@@ -20,6 +23,25 @@ import (
// (default values, validators, hooks and policies) and stitches it
// to their package variables.
func init() {
	attachmentMixin := schema.Attachment{}.Mixin()
	attachmentMixinFields0 := attachmentMixin[0].Fields()
	_ = attachmentMixinFields0
	attachmentFields := schema.Attachment{}.Fields()
	_ = attachmentFields
	// attachmentDescCreatedAt is the schema descriptor for created_at field.
	attachmentDescCreatedAt := attachmentMixinFields0[1].Descriptor()
	// attachment.DefaultCreatedAt holds the default value on creation for the created_at field.
	attachment.DefaultCreatedAt = attachmentDescCreatedAt.Default.(func() time.Time)
	// attachmentDescUpdatedAt is the schema descriptor for updated_at field.
	attachmentDescUpdatedAt := attachmentMixinFields0[2].Descriptor()
	// attachment.DefaultUpdatedAt holds the default value on creation for the updated_at field.
	attachment.DefaultUpdatedAt = attachmentDescUpdatedAt.Default.(func() time.Time)
	// attachment.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
	attachment.UpdateDefaultUpdatedAt = attachmentDescUpdatedAt.UpdateDefault.(func() time.Time)
	// attachmentDescID is the schema descriptor for id field.
	attachmentDescID := attachmentMixinFields0[0].Descriptor()
	// attachment.DefaultID holds the default value on creation for the id field.
	attachment.DefaultID = attachmentDescID.Default.(func() uuid.UUID)
	authtokensMixin := schema.AuthTokens{}.Mixin()
	authtokensMixinFields0 := authtokensMixin[0].Fields()
	_ = authtokensMixinFields0

@@ -43,6 +65,92 @@ func init() {
	authtokensDescID := authtokensMixinFields0[0].Descriptor()
	// authtokens.DefaultID holds the default value on creation for the id field.
	authtokens.DefaultID = authtokensDescID.Default.(func() uuid.UUID)
	documentMixin := schema.Document{}.Mixin()
	documentMixinFields0 := documentMixin[0].Fields()
	_ = documentMixinFields0
	documentFields := schema.Document{}.Fields()
	_ = documentFields
	// documentDescCreatedAt is the schema descriptor for created_at field.
	documentDescCreatedAt := documentMixinFields0[1].Descriptor()
	// document.DefaultCreatedAt holds the default value on creation for the created_at field.
	document.DefaultCreatedAt = documentDescCreatedAt.Default.(func() time.Time)
	// documentDescUpdatedAt is the schema descriptor for updated_at field.
	documentDescUpdatedAt := documentMixinFields0[2].Descriptor()
	// document.DefaultUpdatedAt holds the default value on creation for the updated_at field.
	document.DefaultUpdatedAt = documentDescUpdatedAt.Default.(func() time.Time)
	// document.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
	document.UpdateDefaultUpdatedAt = documentDescUpdatedAt.UpdateDefault.(func() time.Time)
	// documentDescTitle is the schema descriptor for title field.
	documentDescTitle := documentFields[0].Descriptor()
	// document.TitleValidator is a validator for the "title" field. It is called by the builders before save.
	document.TitleValidator = func() func(string) error {
		validators := documentDescTitle.Validators
		fns := [...]func(string) error{
			validators[0].(func(string) error),
			validators[1].(func(string) error),
		}
		return func(title string) error {
			for _, fn := range fns {
				if err := fn(title); err != nil {
					return err
				}
			}
			return nil
		}
	}()
	// documentDescPath is the schema descriptor for path field.
	documentDescPath := documentFields[1].Descriptor()
	// document.PathValidator is a validator for the "path" field. It is called by the builders before save.
	document.PathValidator = func() func(string) error {
		validators := documentDescPath.Validators
		fns := [...]func(string) error{
			validators[0].(func(string) error),
			validators[1].(func(string) error),
		}
		return func(_path string) error {
			for _, fn := range fns {
				if err := fn(_path); err != nil {
					return err
				}
			}
			return nil
		}
	}()
	// documentDescID is the schema descriptor for id field.
	documentDescID := documentMixinFields0[0].Descriptor()
	// document.DefaultID holds the default value on creation for the id field.
	document.DefaultID = documentDescID.Default.(func() uuid.UUID)
	documenttokenMixin := schema.DocumentToken{}.Mixin()
	documenttokenMixinFields0 := documenttokenMixin[0].Fields()
	_ = documenttokenMixinFields0
	documenttokenFields := schema.DocumentToken{}.Fields()
	_ = documenttokenFields
	// documenttokenDescCreatedAt is the schema descriptor for created_at field.
	documenttokenDescCreatedAt := documenttokenMixinFields0[1].Descriptor()
	// documenttoken.DefaultCreatedAt holds the default value on creation for the created_at field.
	documenttoken.DefaultCreatedAt = documenttokenDescCreatedAt.Default.(func() time.Time)
	// documenttokenDescUpdatedAt is the schema descriptor for updated_at field.
	documenttokenDescUpdatedAt := documenttokenMixinFields0[2].Descriptor()
	// documenttoken.DefaultUpdatedAt holds the default value on creation for the updated_at field.
	documenttoken.DefaultUpdatedAt = documenttokenDescUpdatedAt.Default.(func() time.Time)
	// documenttoken.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
	documenttoken.UpdateDefaultUpdatedAt = documenttokenDescUpdatedAt.UpdateDefault.(func() time.Time)
	// documenttokenDescToken is the schema descriptor for token field.
	documenttokenDescToken := documenttokenFields[0].Descriptor()
	// documenttoken.TokenValidator is a validator for the "token" field. It is called by the builders before save.
	documenttoken.TokenValidator = documenttokenDescToken.Validators[0].(func([]byte) error)
	// documenttokenDescUses is the schema descriptor for uses field.
	documenttokenDescUses := documenttokenFields[1].Descriptor()
	// documenttoken.DefaultUses holds the default value on creation for the uses field.
	documenttoken.DefaultUses = documenttokenDescUses.Default.(int)
	// documenttokenDescExpiresAt is the schema descriptor for expires_at field.
	documenttokenDescExpiresAt := documenttokenFields[2].Descriptor()
	// documenttoken.DefaultExpiresAt holds the default value on creation for the expires_at field.
	documenttoken.DefaultExpiresAt = documenttokenDescExpiresAt.Default.(func() time.Time)
	// documenttokenDescID is the schema descriptor for id field.
	documenttokenDescID := documenttokenMixinFields0[0].Descriptor()
	// documenttoken.DefaultID holds the default value on creation for the id field.
	documenttoken.DefaultID = documenttokenDescID.Default.(func() uuid.UUID)
	groupMixin := schema.Group{}.Mixin()
	groupMixinFields0 := groupMixin[0].Fields()
	_ = groupMixinFields0

@@ -123,36 +231,44 @@ func init() {
	itemDescNotes := itemFields[0].Descriptor()
	// item.NotesValidator is a validator for the "notes" field. It is called by the builders before save.
	item.NotesValidator = itemDescNotes.Validators[0].(func(string) error)
	// itemDescQuantity is the schema descriptor for quantity field.
	itemDescQuantity := itemFields[1].Descriptor()
	// item.DefaultQuantity holds the default value on creation for the quantity field.
	item.DefaultQuantity = itemDescQuantity.Default.(int)
	// itemDescInsured is the schema descriptor for insured field.
	itemDescInsured := itemFields[2].Descriptor()
	// item.DefaultInsured holds the default value on creation for the insured field.
	item.DefaultInsured = itemDescInsured.Default.(bool)
	// itemDescSerialNumber is the schema descriptor for serial_number field.
-	itemDescSerialNumber := itemFields[1].Descriptor()
+	itemDescSerialNumber := itemFields[3].Descriptor()
	// item.SerialNumberValidator is a validator for the "serial_number" field. It is called by the builders before save.
	item.SerialNumberValidator = itemDescSerialNumber.Validators[0].(func(string) error)
	// itemDescModelNumber is the schema descriptor for model_number field.
-	itemDescModelNumber := itemFields[2].Descriptor()
+	itemDescModelNumber := itemFields[4].Descriptor()
	// item.ModelNumberValidator is a validator for the "model_number" field. It is called by the builders before save.
	item.ModelNumberValidator = itemDescModelNumber.Validators[0].(func(string) error)
	// itemDescManufacturer is the schema descriptor for manufacturer field.
-	itemDescManufacturer := itemFields[3].Descriptor()
+	itemDescManufacturer := itemFields[5].Descriptor()
	// item.ManufacturerValidator is a validator for the "manufacturer" field. It is called by the builders before save.
	item.ManufacturerValidator = itemDescManufacturer.Validators[0].(func(string) error)
	// itemDescLifetimeWarranty is the schema descriptor for lifetime_warranty field.
-	itemDescLifetimeWarranty := itemFields[4].Descriptor()
+	itemDescLifetimeWarranty := itemFields[6].Descriptor()
	// item.DefaultLifetimeWarranty holds the default value on creation for the lifetime_warranty field.
	item.DefaultLifetimeWarranty = itemDescLifetimeWarranty.Default.(bool)
	// itemDescWarrantyDetails is the schema descriptor for warranty_details field.
-	itemDescWarrantyDetails := itemFields[6].Descriptor()
+	itemDescWarrantyDetails := itemFields[8].Descriptor()
	// item.WarrantyDetailsValidator is a validator for the "warranty_details" field. It is called by the builders before save.
	item.WarrantyDetailsValidator = itemDescWarrantyDetails.Validators[0].(func(string) error)
	// itemDescPurchasePrice is the schema descriptor for purchase_price field.
-	itemDescPurchasePrice := itemFields[9].Descriptor()
+	itemDescPurchasePrice := itemFields[11].Descriptor()
	// item.DefaultPurchasePrice holds the default value on creation for the purchase_price field.
	item.DefaultPurchasePrice = itemDescPurchasePrice.Default.(float64)
	// itemDescSoldPrice is the schema descriptor for sold_price field.
-	itemDescSoldPrice := itemFields[12].Descriptor()
+	itemDescSoldPrice := itemFields[14].Descriptor()
	// item.DefaultSoldPrice holds the default value on creation for the sold_price field.
	item.DefaultSoldPrice = itemDescSoldPrice.Default.(float64)
	// itemDescSoldNotes is the schema descriptor for sold_notes field.
-	itemDescSoldNotes := itemFields[13].Descriptor()
+	itemDescSoldNotes := itemFields[15].Descriptor()
	// item.SoldNotesValidator is a validator for the "sold_notes" field. It is called by the builders before save.
	item.SoldNotesValidator = itemDescSoldNotes.Validators[0].(func(string) error)
	// itemDescID is the schema descriptor for id field.
42
backend/ent/schema/attachment.go
Normal file
@@ -0,0 +1,42 @@
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
	"entgo.io/ent/schema/field"

	"github.com/hay-kot/content/backend/ent/schema/mixins"
)

// Attachment holds the schema definition for the Attachment entity.
type Attachment struct {
	ent.Schema
}

func (Attachment) Mixin() []ent.Mixin {
	return []ent.Mixin{
		mixins.BaseMixin{},
	}
}

// Fields of the Attachment.
func (Attachment) Fields() []ent.Field {
	return []ent.Field{
		field.Enum("type").
			Values("photo", "manual", "warranty", "attachment").
			Default("attachment"),
	}
}

// Edges of the Attachment.
func (Attachment) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("item", Item.Type).
			Ref("attachments").
			Required().
			Unique(),
		edge.From("document", Document.Type).
			Ref("attachments").
			Required().
			Unique(),
	}
}
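With the schema above, ent generates an Attachment client whose builder mirrors the field and edge names; the repository code later in this diff uses it exactly this way. A minimal sketch:

	att, err := client.Attachment.Create().
		SetType(attachment.TypePhoto). // one of: photo, manual, warranty, attachment
		SetItemID(itemID).             // required, unique edge to Item
		SetDocumentID(docID).          // required, unique edge to Document
		Save(ctx)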
@@ -42,7 +42,6 @@ func (AuthTokens) Edges() []ent.Edge {

 func (AuthTokens) Indexes() []ent.Index {
 	return []ent.Index{
-		// non-unique index.
 		index.Fields("token"),
 	}
 }
50
backend/ent/schema/document.go
Normal file
@@ -0,0 +1,50 @@
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/dialect/entsql"
	"entgo.io/ent/schema/edge"
	"entgo.io/ent/schema/field"

	"github.com/hay-kot/content/backend/ent/schema/mixins"
)

// Document holds the schema definition for the Document entity.
type Document struct {
	ent.Schema
}

func (Document) Mixin() []ent.Mixin {
	return []ent.Mixin{
		mixins.BaseMixin{},
	}
}

// Fields of the Document.
func (Document) Fields() []ent.Field {
	return []ent.Field{
		field.String("title").
			MaxLen(255).
			NotEmpty(),
		field.String("path").
			MaxLen(500).
			NotEmpty(),
	}
}

// Edges of the Document.
func (Document) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("group", Group.Type).
			Ref("documents").
			Required().
			Unique(),
		edge.To("document_tokens", DocumentToken.Type).
			Annotations(entsql.Annotation{
				OnDelete: entsql.Cascade,
			}),
		edge.To("attachments", Attachment.Type).
			Annotations(entsql.Annotation{
				OnDelete: entsql.Cascade,
			}),
	}
}
50
backend/ent/schema/document_token.go
Normal file
@@ -0,0 +1,50 @@
package schema

import (
	"time"

	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
	"entgo.io/ent/schema/field"
	"entgo.io/ent/schema/index"

	"github.com/hay-kot/content/backend/ent/schema/mixins"
)

// DocumentToken holds the schema definition for the DocumentToken entity.
type DocumentToken struct {
	ent.Schema
}

func (DocumentToken) Mixin() []ent.Mixin {
	return []ent.Mixin{
		mixins.BaseMixin{},
	}
}

// Fields of the DocumentToken.
func (DocumentToken) Fields() []ent.Field {
	return []ent.Field{
		field.Bytes("token").
			NotEmpty().
			Unique(),
		field.Int("uses").
			Default(1),
		field.Time("expires_at").
			Default(func() time.Time { return time.Now().Add(time.Minute * 10) }),
	}
}

// Edges of the DocumentToken.
func (DocumentToken) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("document", Document.Type).
			Ref("document_tokens").
			Unique(),
	}
}

func (DocumentToken) Indexes() []ent.Index {
	return []ent.Index{
		index.Fields("token"),
	}
}
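The expires_at default gives each token a ten-minute lifetime when the caller does not set one explicitly. A sketch, assuming the standard generated builder:

	tok, err := client.DocumentToken.Create().
		SetDocumentID(docID).
		SetToken(hash). // unique, non-empty []byte
		Save(ctx)       // expires_at defaults to time.Now().Add(time.Minute * 10)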
@@ -34,17 +34,25 @@ func (Group) Fields() []ent.Field {

 // Edges of the Home.
 func (Group) Edges() []ent.Edge {
 	return []ent.Edge{
-		edge.To("users", User.Type).Annotations(entsql.Annotation{
-			OnDelete: entsql.Cascade,
-		}),
-		edge.To("locations", Location.Type).Annotations(entsql.Annotation{
-			OnDelete: entsql.Cascade,
-		}),
-		edge.To("items", Item.Type).Annotations(entsql.Annotation{
-			OnDelete: entsql.Cascade,
-		}),
-		edge.To("labels", Label.Type).Annotations(entsql.Annotation{
-			OnDelete: entsql.Cascade,
-		}),
+		edge.To("users", User.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
+		edge.To("locations", Location.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
+		edge.To("items", Item.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
+		edge.To("labels", Label.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
+		edge.To("documents", Document.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
 	}
 }
@@ -37,6 +37,10 @@ func (Item) Fields() []ent.Field {
 		field.String("notes").
 			MaxLen(1000).
 			Optional(),
+		field.Int("quantity").
+			Default(1),
+		field.Bool("insured").
+			Default(false),

 		// ------------------------------------
 		// item identification
@@ -93,10 +97,15 @@ func (Item) Edges() []ent.Edge {
 		edge.From("location", Location.Type).
 			Ref("items").
 			Unique(),
-		edge.To("fields", ItemField.Type).Annotations(entsql.Annotation{
-			OnDelete: entsql.Cascade,
-		}),
+		edge.To("fields", ItemField.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
 		edge.From("label", Label.Type).
 			Ref("items"),
+		edge.To("attachments", Attachment.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
 	}
 }
18
backend/ent/schema/templates/has_id.tmpl
Normal file
@@ -0,0 +1,18 @@
{{/* The line below tells Intellij/GoLand to enable the autocompletion based on the *gen.Graph type. */}}
{{/* gotype: entgo.io/ent/entc/gen.Graph */}}

{{ define "has_id" }}

{{/* Add the base header for the generated file */}}
{{ $pkg := base $.Config.Package }}
{{ template "header" $ }}

import "github.com/google/uuid"

{{/* Loop over all nodes and implement the "HasID" interface */}}
{{ range $n := $.Nodes }}
	{{ $receiver := $n.Receiver }}
	func ({{ $receiver }} *{{ $n.Name }}) GetID() uuid.UUID {
		return {{ $receiver }}.ID
	}
{{ end }}

{{ end }}
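For a single node the template expands to a small accessor; for the Item entity (receiver name chosen by ent) the generated code looks like:

	func (i *Item) GetID() uuid.UUID {
		return i.ID
	}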
@@ -12,8 +12,14 @@ import (

 // Tx is a transactional client that is created by calling Client.Tx().
 type Tx struct {
 	config
+	// Attachment is the client for interacting with the Attachment builders.
+	Attachment *AttachmentClient
 	// AuthTokens is the client for interacting with the AuthTokens builders.
 	AuthTokens *AuthTokensClient
+	// Document is the client for interacting with the Document builders.
+	Document *DocumentClient
+	// DocumentToken is the client for interacting with the DocumentToken builders.
+	DocumentToken *DocumentTokenClient
 	// Group is the client for interacting with the Group builders.
 	Group *GroupClient
 	// Item is the client for interacting with the Item builders.
@@ -161,7 +167,10 @@ func (tx *Tx) Client() *Client {
 }

 func (tx *Tx) init() {
+	tx.Attachment = NewAttachmentClient(tx.config)
 	tx.AuthTokens = NewAuthTokensClient(tx.config)
+	tx.Document = NewDocumentClient(tx.config)
+	tx.DocumentToken = NewDocumentTokenClient(tx.config)
 	tx.Group = NewGroupClient(tx.config)
 	tx.Item = NewItemClient(tx.config)
 	tx.ItemField = NewItemFieldClient(tx.config)
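With the new clients wired into Tx, a document and its attachment can be written in one transaction using ent's standard Tx API. A sketch (identifier names are illustrative):

	tx, err := client.Tx(ctx)
	if err != nil {
		return err
	}
	doc, err := tx.Document.Create().SetGroupID(gid).SetTitle("manual.pdf").SetPath(path).Save(ctx)
	if err != nil {
		return tx.Rollback()
	}
	if _, err := tx.Attachment.Create().SetItemID(itemID).SetDocumentID(doc.ID).Save(ctx); err != nil {
		return tx.Rollback()
	}
	return tx.Commit()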
@@ -177,7 +186,7 @@ func (tx *Tx) init() {
 // of them in order to commit or rollback the transaction.
 //
 // If a closed transaction is embedded in one of the generated entities, and the entity
-// applies a query, for example: AuthTokens.QueryXXX(), the query will be executed
+// applies a query, for example: Attachment.QueryXXX(), the query will be executed
 // through the driver which created this transaction.
 //
 // Note that txDriver is not goroutine safe.
@@ -8,9 +8,9 @@ import (
 func UserFactory() types.UserCreate {
 	f := faker.NewFaker()
 	return types.UserCreate{
-		Name:        f.RandomString(10),
-		Email:       f.RandomEmail(),
-		Password:    f.RandomString(10),
-		IsSuperuser: f.RandomBool(),
+		Name:        f.Str(10),
+		Email:       f.Email(),
+		Password:    f.Str(10),
+		IsSuperuser: f.Bool(),
 	}
 }
62
backend/internal/repo/id_set.go
Normal file
@@ -0,0 +1,62 @@
package repo

import "github.com/google/uuid"

// HasID is an interface for entities that have an ID uuid.UUID field and a GetID() method.
// This interface is fulfilled by all entities generated by entgo.io/ent via a custom template.
type HasID interface {
	GetID() uuid.UUID
}

// IDSet is a utility set-like type for working with sets of uuid.UUIDs within a repository
// instance. It is most useful for comparing lists of UUIDs when processing relationship
// IDs and removing/adding relationships as required.
//
// See how ItemsRepository uses it to manage the Labels-to-Items relationship.
//
// NOTE: it may be worth moving this to a more generic package/set implementation,
// or using a 3rd-party set library, but this is good enough for now.
type IDSet struct {
	mp map[uuid.UUID]struct{}
}

func NewIDSet(l int) *IDSet {
	return &IDSet{
		mp: make(map[uuid.UUID]struct{}, l),
	}
}

func EntitiesToIDSet[T HasID](entities []T) *IDSet {
	s := NewIDSet(len(entities))
	for _, e := range entities {
		s.Add(e.GetID())
	}
	return s
}

func (t *IDSet) Slice() []uuid.UUID {
	s := make([]uuid.UUID, 0, len(t.mp))
	for k := range t.mp {
		s = append(s, k)
	}
	return s
}

func (t *IDSet) Add(ids ...uuid.UUID) {
	for _, id := range ids {
		t.mp[id] = struct{}{}
	}
}

func (t *IDSet) Has(id uuid.UUID) bool {
	_, ok := t.mp[id]
	return ok
}

func (t *IDSet) Len() int {
	return len(t.mp)
}

func (t *IDSet) Remove(id uuid.UUID) {
	delete(t.mp, id)
}
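The whole IDSet API in one pass, seeding the set from generated entities via GetID():

	set := EntitiesToIDSet(labels) // *IDSet holding every label's uuid.UUID
	set.Add(extraID)               // variadic add
	if set.Has(extraID) {
		set.Remove(extraID)
	}
	_ = set.Len()   // number of remaining IDs
	_ = set.Slice() // []uuid.UUID, order unspecified (map iteration)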
47
backend/internal/repo/repo_documents.go
Normal file
@@ -0,0 +1,47 @@
package repo

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/document"
	"github.com/hay-kot/content/backend/ent/group"
	"github.com/hay-kot/content/backend/internal/types"
)

// DocumentRepository is a repository for the Document entity.
type DocumentRepository struct {
	db *ent.Client
}

func (r *DocumentRepository) Create(ctx context.Context, gid uuid.UUID, doc types.DocumentCreate) (*ent.Document, error) {
	return r.db.Document.Create().
		SetGroupID(gid).
		SetTitle(doc.Title).
		SetPath(doc.Path).
		Save(ctx)
}

func (r *DocumentRepository) GetAll(ctx context.Context, gid uuid.UUID) ([]*ent.Document, error) {
	return r.db.Document.Query().
		Where(document.HasGroupWith(group.ID(gid))).
		All(ctx)
}

func (r *DocumentRepository) Get(ctx context.Context, id uuid.UUID) (*ent.Document, error) {
	return r.db.Document.Query().
		Where(document.ID(id)).
		Only(ctx)
}

func (r *DocumentRepository) Update(ctx context.Context, id uuid.UUID, doc types.DocumentUpdate) (*ent.Document, error) {
	return r.db.Document.UpdateOneID(id).
		SetTitle(doc.Title).
		SetPath(doc.Path).
		Save(ctx)
}

func (r *DocumentRepository) Delete(ctx context.Context, id uuid.UUID) error {
	return r.db.Document.DeleteOneID(id).Exec(ctx)
}
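Typical use from a service, as wired up later in this diff (the title and path values are illustrative):

	doc, err := repos.Docs.Create(ctx, groupID, types.DocumentCreate{
		Title: "receipt.pdf",
		Path:  "/tmp/content/" + groupID.String() + "/receipt.pdf",
	})
	if err != nil {
		return err
	}
	docs, err := repos.Docs.GetAll(ctx, groupID) // scoped to the group edge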
202
backend/internal/repo/repo_documents_test.go
Normal file
@@ -0,0 +1,202 @@
package repo

import (
	"context"
	"testing"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/internal/types"
	"github.com/stretchr/testify/assert"
)

func TestDocumentRepository_Create(t *testing.T) {
	type args struct {
		ctx context.Context
		gid uuid.UUID
		doc types.DocumentCreate
	}
	tests := []struct {
		name    string
		args    args
		want    *ent.Document
		wantErr bool
	}{
		{
			name: "create document",
			args: args{
				ctx: context.Background(),
				gid: tGroup.ID,
				doc: types.DocumentCreate{
					Title: "test document",
					Path:  "/test/document",
				},
			},
			want: &ent.Document{
				Title: "test document",
				Path:  "/test/document",
			},
			wantErr: false,
		},
		{
			name: "create document with empty title",
			args: args{
				ctx: context.Background(),
				gid: tGroup.ID,
				doc: types.DocumentCreate{
					Title: "",
					Path:  "/test/document",
				},
			},
			want:    nil,
			wantErr: true,
		},
		{
			name: "create document with empty path",
			args: args{
				ctx: context.Background(),
				gid: tGroup.ID,
				doc: types.DocumentCreate{
					Title: "test document",
					Path:  "",
				},
			},
			want:    nil,
			wantErr: true,
		},
	}
	ids := make([]uuid.UUID, 0, len(tests))

	t.Cleanup(func() {
		for _, id := range ids {
			err := tRepos.Docs.Delete(context.Background(), id)
			assert.NoError(t, err)
		}
	})

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tRepos.Docs.Create(tt.args.ctx, tt.args.gid, tt.args.doc)
			if (err != nil) != tt.wantErr {
				t.Errorf("DocumentRepository.Create() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if tt.wantErr {
				assert.Error(t, err)
				assert.Nil(t, got)
				return
			}

			assert.Equal(t, tt.want.Title, got.Title)
			assert.Equal(t, tt.want.Path, got.Path)
			ids = append(ids, got.ID)
		})
	}
}

func useDocs(t *testing.T, num int) []*ent.Document {
	t.Helper()

	results := make([]*ent.Document, 0, num)
	ids := make([]uuid.UUID, 0, num)

	for i := 0; i < num; i++ {
		doc, err := tRepos.Docs.Create(context.Background(), tGroup.ID, types.DocumentCreate{
			Title: fk.Str(10),
			Path:  fk.Path(),
		})

		assert.NoError(t, err)
		assert.NotNil(t, doc)
		results = append(results, doc)
		ids = append(ids, doc.ID)
	}

	t.Cleanup(func() {
		for _, id := range ids {
			err := tRepos.Docs.Delete(context.Background(), id)

			if err != nil {
				assert.True(t, ent.IsNotFound(err))
			}
		}
	})

	return results
}

func TestDocumentRepository_GetAll(t *testing.T) {
	entities := useDocs(t, 10)

	for _, entity := range entities {
		assert.NotNil(t, entity)
	}

	all, err := tRepos.Docs.GetAll(context.Background(), tGroup.ID)
	assert.NoError(t, err)

	assert.Len(t, all, 10)
	for _, entity := range all {
		assert.NotNil(t, entity)

		for _, e := range entities {
			if e.ID == entity.ID {
				assert.Equal(t, e.Title, entity.Title)
				assert.Equal(t, e.Path, entity.Path)
			}
		}
	}
}

func TestDocumentRepository_Get(t *testing.T) {
	entities := useDocs(t, 10)

	for _, entity := range entities {
		got, err := tRepos.Docs.Get(context.Background(), entity.ID)

		assert.NoError(t, err)
		assert.Equal(t, entity.ID, got.ID)
		assert.Equal(t, entity.Title, got.Title)
		assert.Equal(t, entity.Path, got.Path)
	}
}

func TestDocumentRepository_Update(t *testing.T) {
	entities := useDocs(t, 10)

	for _, entity := range entities {
		got, err := tRepos.Docs.Get(context.Background(), entity.ID)

		assert.NoError(t, err)
		assert.Equal(t, entity.ID, got.ID)
		assert.Equal(t, entity.Title, got.Title)
		assert.Equal(t, entity.Path, got.Path)
	}

	for _, entity := range entities {
		updateData := types.DocumentUpdate{
			Title: fk.Str(10),
			Path:  fk.Path(),
		}

		updated, err := tRepos.Docs.Update(context.Background(), entity.ID, updateData)

		assert.NoError(t, err)
		assert.Equal(t, entity.ID, updated.ID)
		assert.Equal(t, updateData.Title, updated.Title)
		assert.Equal(t, updateData.Path, updated.Path)
	}
}

func TestDocumentRepository_Delete(t *testing.T) {
	entities := useDocs(t, 10)

	for _, entity := range entities {
		err := tRepos.Docs.Delete(context.Background(), entity.ID)
		assert.NoError(t, err)

		_, err = tRepos.Docs.Get(context.Background(), entity.ID)
		assert.Error(t, err)
	}
}
41
backend/internal/repo/repo_documents_tokens.go
Normal file
@@ -0,0 +1,41 @@
package repo

import (
	"context"
	"time"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/internal/types"
)

// DocumentTokensRepository is a repository for the DocumentToken entity.
type DocumentTokensRepository struct {
	db *ent.Client
}

func (r *DocumentTokensRepository) Create(ctx context.Context, data types.DocumentTokenCreate) (*ent.DocumentToken, error) {
	result, err := r.db.DocumentToken.Create().
		SetDocumentID(data.DocumentID).
		SetToken(data.TokenHash).
		SetExpiresAt(data.ExpiresAt).
		Save(ctx)

	if err != nil {
		return nil, err
	}

	return r.db.DocumentToken.Query().
		Where(documenttoken.ID(result.ID)).
		WithDocument().
		Only(ctx)
}

func (r *DocumentTokensRepository) PurgeExpiredTokens(ctx context.Context) (int, error) {
	return r.db.DocumentToken.Delete().Where(documenttoken.ExpiresAtLT(time.Now())).Exec(ctx)
}

func (r *DocumentTokensRepository) Delete(ctx context.Context, id uuid.UUID) error {
	return r.db.DocumentToken.DeleteOneID(id).Exec(ctx)
}
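PurgeExpiredTokens lends itself to a periodic cleanup job; the ticker wiring below is an assumption for illustration, not part of this commit:

	go func() {
		ticker := time.NewTicker(time.Hour)
		defer ticker.Stop()
		for range ticker.C {
			// Deletes every token whose expires_at is in the past.
			if n, err := repos.DocTokens.PurgeExpiredTokens(context.Background()); err == nil && n > 0 {
				log.Printf("purged %d expired document tokens", n)
			}
		}
	}()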
149
backend/internal/repo/repo_documents_tokens_test.go
Normal file
@@ -0,0 +1,149 @@
package repo

import (
	"context"
	"testing"
	"time"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/documenttoken"
	"github.com/hay-kot/content/backend/internal/types"
	"github.com/stretchr/testify/assert"
)

func TestDocumentTokensRepository_Create(t *testing.T) {
	entities := useDocs(t, 1)
	doc := entities[0]
	expires := fk.Time()

	type args struct {
		ctx  context.Context
		data types.DocumentTokenCreate
	}
	tests := []struct {
		name    string
		args    args
		want    *ent.DocumentToken
		wantErr bool
	}{
		{
			name: "create document token",
			args: args{
				ctx: context.Background(),
				data: types.DocumentTokenCreate{
					DocumentID: doc.ID,
					TokenHash:  []byte("token"),
					ExpiresAt:  expires,
				},
			},
			want: &ent.DocumentToken{
				Edges: ent.DocumentTokenEdges{
					Document: doc,
				},
				Token:     []byte("token"),
				ExpiresAt: expires,
			},
			wantErr: false,
		},
		{
			name: "create document token with empty token",
			args: args{
				ctx: context.Background(),
				data: types.DocumentTokenCreate{
					DocumentID: doc.ID,
					TokenHash:  []byte(""),
					ExpiresAt:  expires,
				},
			},
			want:    nil,
			wantErr: true,
		},
		{
			name: "create document token with empty document id",
			args: args{
				ctx: context.Background(),
				data: types.DocumentTokenCreate{
					DocumentID: uuid.Nil,
					TokenHash:  []byte("token"),
					ExpiresAt:  expires,
				},
			},
			want:    nil,
			wantErr: true,
		},
	}

	ids := make([]uuid.UUID, 0, len(tests))

	t.Cleanup(func() {
		for _, id := range ids {
			_ = tRepos.DocTokens.Delete(context.Background(), id)
		}
	})

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tRepos.DocTokens.Create(tt.args.ctx, tt.args.data)
			if (err != nil) != tt.wantErr {
				t.Errorf("DocumentTokensRepository.Create() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			assert.Equal(t, tt.want.Token, got.Token)
			assert.WithinDuration(t, tt.want.ExpiresAt, got.ExpiresAt, time.Duration(1)*time.Second)
			assert.Equal(t, tt.want.Edges.Document.ID, got.Edges.Document.ID)
		})
	}
}

func useDocTokens(t *testing.T, num int) []*ent.DocumentToken {
	entity := useDocs(t, 1)[0]

	results := make([]*ent.DocumentToken, 0, num)

	ids := make([]uuid.UUID, 0, num)
	t.Cleanup(func() {
		for _, id := range ids {
			_ = tRepos.DocTokens.Delete(context.Background(), id)
		}
	})

	for i := 0; i < num; i++ {
		e, err := tRepos.DocTokens.Create(context.Background(), types.DocumentTokenCreate{
			DocumentID: entity.ID,
			TokenHash:  []byte(fk.Str(10)),
			ExpiresAt:  fk.Time(),
		})

		assert.NoError(t, err)
		results = append(results, e)
		ids = append(ids, e.ID)
	}

	return results
}

func TestDocumentTokensRepository_PurgeExpiredTokens(t *testing.T) {
	entities := useDocTokens(t, 2)

	// set expired token
	tRepos.DocTokens.db.DocumentToken.Update().
		Where(documenttoken.ID(entities[0].ID)).
		SetExpiresAt(time.Now().Add(-time.Hour)).
		ExecX(context.Background())

	count, err := tRepos.DocTokens.PurgeExpiredTokens(context.Background())
	assert.NoError(t, err)
	assert.Equal(t, 1, count)

	all, err := tRepos.DocTokens.db.DocumentToken.Query().All(context.Background())
	assert.NoError(t, err)
	assert.Len(t, all, 1)
	assert.Equal(t, entities[1].ID, all[0].ID)
}
44
backend/internal/repo/repo_item_attachments.go
Normal file
@@ -0,0 +1,44 @@
package repo

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/attachment"
)

// AttachmentRepo is a repository for the Attachments table, which links Items to Documents
// while also specifying the type of the attachment. It _ONLY_ provides basic Create, Update,
// and Delete operations. For accessing the actual documents, use the Items repository, since
// it provides the attachments along with the documents.
type AttachmentRepo struct {
	db *ent.Client
}

func (r *AttachmentRepo) Create(ctx context.Context, itemId, docId uuid.UUID, typ attachment.Type) (*ent.Attachment, error) {
	return r.db.Attachment.Create().
		SetType(typ).
		SetDocumentID(docId).
		SetItemID(itemId).
		Save(ctx)
}

func (r *AttachmentRepo) Get(ctx context.Context, id uuid.UUID) (*ent.Attachment, error) {
	return r.db.Attachment.
		Query().
		Where(attachment.ID(id)).
		WithItem().
		WithDocument().
		Only(ctx)
}

func (r *AttachmentRepo) Update(ctx context.Context, id uuid.UUID, typ attachment.Type) (*ent.Attachment, error) {
	return r.db.Attachment.UpdateOneID(id).
		SetType(typ).
		Save(ctx)
}

func (r *AttachmentRepo) Delete(ctx context.Context, id uuid.UUID) error {
	return r.db.Attachment.DeleteOneID(id).Exec(ctx)
}
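Get eager-loads both edges, so a caller reaches the joined item and document in one query:

	att, err := repos.Attachments.Get(ctx, attachmentID)
	if err != nil {
		return err
	}
	fmt.Println(att.Edges.Item.ID, att.Edges.Document.Path) // both populated by WithItem/WithDocument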
133
backend/internal/repo/repo_item_attachments_test.go
Normal file
@@ -0,0 +1,133 @@
package repo

import (
	"context"
	"testing"

	"github.com/google/uuid"
	"github.com/hay-kot/content/backend/ent"
	"github.com/hay-kot/content/backend/ent/attachment"
	"github.com/stretchr/testify/assert"
)

func TestAttachmentRepo_Create(t *testing.T) {
	doc := useDocs(t, 1)[0]
	item := useItems(t, 1)[0]

	ids := []uuid.UUID{doc.ID, item.ID}
	t.Cleanup(func() {
		for _, id := range ids {
			_ = tRepos.Attachments.Delete(context.Background(), id)
		}
	})

	type args struct {
		ctx    context.Context
		itemId uuid.UUID
		docId  uuid.UUID
		typ    attachment.Type
	}
	tests := []struct {
		name    string
		args    args
		want    *ent.Attachment
		wantErr bool
	}{
		{
			name: "create attachment",
			args: args{
				ctx:    context.Background(),
				itemId: item.ID,
				docId:  doc.ID,
				typ:    attachment.TypePhoto,
			},
			want: &ent.Attachment{
				Type: attachment.TypePhoto,
			},
		},
		{
			name: "create attachment with invalid item id",
			args: args{
				ctx:    context.Background(),
				itemId: uuid.New(),
				docId:  doc.ID,
				typ:    "blarg",
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tRepos.Attachments.Create(tt.args.ctx, tt.args.itemId, tt.args.docId, tt.args.typ)
			if (err != nil) != tt.wantErr {
				t.Errorf("AttachmentRepo.Create() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if tt.wantErr {
				return
			}

			assert.Equal(t, tt.want.Type, got.Type)

			withItems, err := tRepos.Attachments.Get(tt.args.ctx, got.ID)
			assert.NoError(t, err)
			assert.Equal(t, tt.args.itemId, withItems.Edges.Item.ID)
			assert.Equal(t, tt.args.docId, withItems.Edges.Document.ID)

			ids = append(ids, got.ID)
		})
	}
}

func useAttachments(t *testing.T, n int) []*ent.Attachment {
	t.Helper()

	doc := useDocs(t, 1)[0]
	item := useItems(t, 1)[0]

	ids := make([]uuid.UUID, 0, n)
	t.Cleanup(func() {
		for _, id := range ids {
			_ = tRepos.Attachments.Delete(context.Background(), id)
		}
	})

	attachments := make([]*ent.Attachment, n)
	for i := 0; i < n; i++ {
		att, err := tRepos.Attachments.Create(context.Background(), item.ID, doc.ID, attachment.TypePhoto)
		assert.NoError(t, err)
		attachments[i] = att

		ids = append(ids, att.ID)
	}

	return attachments
}

func TestAttachmentRepo_Update(t *testing.T) {
	entity := useAttachments(t, 1)[0]

	for _, typ := range []attachment.Type{"photo", "manual", "warranty", "attachment"} {
		t.Run(string(typ), func(t *testing.T) {
			_, err := tRepos.Attachments.Update(context.Background(), entity.ID, typ)
			assert.NoError(t, err)

			updated, err := tRepos.Attachments.Get(context.Background(), entity.ID)
			assert.NoError(t, err)
			assert.Equal(t, typ, updated.Type)
		})
	}
}

func TestAttachmentRepo_Delete(t *testing.T) {
	entity := useAttachments(t, 1)[0]

	err := tRepos.Attachments.Delete(context.Background(), entity.ID)
	assert.NoError(t, err)

	_, err = tRepos.Attachments.Get(context.Background(), entity.ID)
	assert.Error(t, err)
}
@@ -21,9 +21,13 @@ func (e *ItemsRepository) GetOne(ctx context.Context, id uuid.UUID) (*ent.Item, error) {
 		WithLabel().
 		WithLocation().
 		WithGroup().
+		WithAttachments(func(aq *ent.AttachmentQuery) {
+			aq.WithDocument()
+		}).
 		Only(ctx)
 }

+// GetAll returns all the items in the database with the Labels and Locations eager loaded.
 func (e *ItemsRepository) GetAll(ctx context.Context, gid uuid.UUID) ([]*ent.Item, error) {
 	return e.db.Item.Query().
 		Where(item.HasGroupWith(group.ID(gid))).
@@ -72,11 +76,31 @@ func (e *ItemsRepository) Update(ctx context.Context, data types.ItemUpdate) (*ent.Item, error) {
 		SetSoldNotes(data.SoldNotes).
 		SetNotes(data.Notes).
 		SetLifetimeWarranty(data.LifetimeWarranty).
+		SetInsured(data.Insured).
 		SetWarrantyExpires(data.WarrantyExpires).
-		SetWarrantyDetails(data.WarrantyDetails)
+		SetWarrantyDetails(data.WarrantyDetails).
+		SetQuantity(data.Quantity)

-	err := q.Exec(ctx)
+	currentLabels, err := e.db.Item.Query().Where(item.ID(data.ID)).QueryLabel().All(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	set := EntitiesToIDSet(currentLabels)
+
+	for _, l := range data.LabelIDs {
+		if set.Has(l) {
+			set.Remove(l)
+			continue
+		}
+		q.AddLabelIDs(l)
+	}
+
+	if set.Len() > 0 {
+		q.RemoveLabelIDs(set.Slice()...)
+	}
+
+	err = q.Exec(ctx)
 	if err != nil {
 		return nil, err
 	}
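The label handling above is plain set reconciliation: IDs in the request but not in the current set are added, and whatever remains in the current set afterwards is removed. The same computation with plain maps and hypothetical string IDs, for illustration:

	current := map[string]bool{"a": true, "b": true} // labels already on the item
	incoming := []string{"b", "c"}                   // labels in the update request

	var toAdd []string
	for _, id := range incoming {
		if current[id] {
			delete(current, id) // present on both sides: keep it, drop it from the removal set
			continue
		}
		toAdd = append(toAdd, id) // new relationship
	}
	var toRemove []string
	for id := range current {
		toRemove = append(toRemove, id) // stale relationship
	}
	// toAdd == ["c"], toRemove == ["a"]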
@@ -5,6 +5,7 @@ import (
 	"testing"
 	"time"

+	"github.com/google/uuid"
 	"github.com/hay-kot/content/backend/ent"
 	"github.com/hay-kot/content/backend/internal/types"
 	"github.com/stretchr/testify/assert"
@@ -12,12 +13,12 @@ import (

 func itemFactory() types.ItemCreate {
 	return types.ItemCreate{
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 	}
 }

-func useItems(t *testing.T, len int) ([]*ent.Item, func()) {
+func useItems(t *testing.T, len int) []*ent.Item {
 	t.Helper()

 	location, err := tRepos.Locations.Create(context.Background(), tGroup.ID, locationFactory())
@@ -33,17 +34,17 @@ func useItems(t *testing.T, len int) ([]*ent.Item, func()) {
 		items[i] = item
 	}

-	return items, func() {
-		for _, item := range items {
-			err := tRepos.Items.Delete(context.Background(), item.ID)
-			assert.NoError(t, err)
-		}
-	}
+	t.Cleanup(func() {
+		for _, item := range items {
+			_ = tRepos.Items.Delete(context.Background(), item.ID)
+		}
+	})
+
+	return items
 }

 func TestItemsRepository_GetOne(t *testing.T) {
-	entity, cleanup := useItems(t, 3)
-	defer cleanup()
+	entity := useItems(t, 3)

 	for _, item := range entity {
 		result, err := tRepos.Items.GetOne(context.Background(), item.ID)
@@ -54,8 +55,7 @@ func TestItemsRepository_GetOne(t *testing.T) {

 func TestItemsRepository_GetAll(t *testing.T) {
 	length := 10
-	expected, cleanup := useItems(t, length)
-	defer cleanup()
+	expected := useItems(t, length)

 	results, err := tRepos.Items.GetAll(context.Background(), tGroup.ID)
 	assert.NoError(t, err)
@@ -119,7 +119,7 @@ func TestItemsRepository_Create_Location(t *testing.T) {
 }

 func TestItemsRepository_Delete(t *testing.T) {
-	entities, _ := useItems(t, 3)
+	entities := useItems(t, 3)

 	for _, item := range entities {
 		err := tRepos.Items.Delete(context.Background(), item.ID)
@@ -131,9 +131,68 @@ func TestItemsRepository_Delete(t *testing.T) {
 	assert.Empty(t, results)
 }

+func TestItemsRepository_Update_Labels(t *testing.T) {
+	entity := useItems(t, 1)[0]
+	labels := useLabels(t, 3)
+
+	labelsIDs := []uuid.UUID{labels[0].ID, labels[1].ID, labels[2].ID}
+
+	type args struct {
+		labelIds []uuid.UUID
+	}
+
+	tests := []struct {
+		name string
+		args args
+		want []uuid.UUID
+	}{
+		{
+			name: "add all labels",
+			args: args{
+				labelIds: labelsIDs,
+			},
+			want: labelsIDs,
+		},
+		{
+			name: "update with one label",
+			args: args{
+				labelIds: labelsIDs[:1],
+			},
+			want: labelsIDs[:1],
+		},
+		{
+			name: "add one new label to existing single label",
+			args: args{
+				labelIds: labelsIDs[1:],
+			},
+			want: labelsIDs[1:],
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			// Apply the labels to the entity
+			updateData := types.ItemUpdate{
+				ID:         entity.ID,
+				Name:       entity.Name,
+				LocationID: entity.Edges.Location.ID,
+				LabelIDs:   tt.args.labelIds,
+			}
+
+			updated, err := tRepos.Items.Update(context.Background(), updateData)
+			assert.NoError(t, err)
+			assert.Len(t, tt.want, len(updated.Edges.Label))
+
+			for _, label := range updated.Edges.Label {
+				assert.Contains(t, tt.want, label.ID)
+			}
+		})
+	}
+}
+
 func TestItemsRepository_Update(t *testing.T) {
-	entities, cleanup := useItems(t, 3)
-	defer cleanup()
+	entities := useItems(t, 3)

 	entity := entities[0]
@@ -141,20 +200,20 @@ func TestItemsRepository_Update(t *testing.T) {
 		ID:               entity.ID,
 		Name:             entity.Name,
 		LocationID:       entity.Edges.Location.ID,
-		SerialNumber:     fk.RandomString(10),
+		SerialNumber:     fk.Str(10),
 		LabelIDs:         nil,
-		ModelNumber:      fk.RandomString(10),
-		Manufacturer:     fk.RandomString(10),
+		ModelNumber:      fk.Str(10),
+		Manufacturer:     fk.Str(10),
 		PurchaseTime:     time.Now(),
-		PurchaseFrom:     fk.RandomString(10),
+		PurchaseFrom:     fk.Str(10),
 		PurchasePrice:    300.99,
 		SoldTime:         time.Now(),
-		SoldTo:           fk.RandomString(10),
+		SoldTo:           fk.Str(10),
 		SoldPrice:        300.99,
-		SoldNotes:        fk.RandomString(10),
-		Notes:            fk.RandomString(10),
+		SoldNotes:        fk.Str(10),
+		Notes:            fk.Str(10),
 		WarrantyExpires:  time.Now(),
-		WarrantyDetails:  fk.RandomString(10),
+		WarrantyDetails:  fk.Str(10),
 		LifetimeWarranty: true,
 	}
@@ -11,12 +11,12 @@ import (

 func labelFactory() types.LabelCreate {
 	return types.LabelCreate{
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 	}
 }

-func useLabels(t *testing.T, len int) ([]*ent.Label, func()) {
+func useLabels(t *testing.T, len int) []*ent.Label {
 	t.Helper()

 	labels := make([]*ent.Label, len)
@@ -28,17 +28,17 @@ func useLabels(t *testing.T, len int) ([]*ent.Label, func()) {
 		labels[i] = item
 	}

-	return labels, func() {
-		for _, item := range labels {
-			err := tRepos.Labels.Delete(context.Background(), item.ID)
-			assert.NoError(t, err)
-		}
-	}
+	t.Cleanup(func() {
+		for _, item := range labels {
+			_ = tRepos.Labels.Delete(context.Background(), item.ID)
+		}
+	})
+
+	return labels
 }

 func TestLabelRepository_Get(t *testing.T) {
-	labels, cleanup := useLabels(t, 1)
-	defer cleanup()
+	labels := useLabels(t, 1)
 	label := labels[0]

 	// Get by ID
@@ -48,8 +48,7 @@ func TestLabelRepository_Get(t *testing.T) {
 }

 func TestLabelRepositoryGetAll(t *testing.T) {
-	_, cleanup := useLabels(t, 10)
-	defer cleanup()
+	useLabels(t, 10)

 	all, err := tRepos.Labels.GetAll(context.Background(), tGroup.ID)
 	assert.NoError(t, err)
@@ -75,8 +74,8 @@ func TestLabelRepository_Update(t *testing.T) {

 	updateData := types.LabelUpdate{
 		ID:          loc.ID,
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 	}

 	update, err := tRepos.Labels.Update(context.Background(), updateData)
@@ -10,8 +10,8 @@ import (

 func locationFactory() types.LocationCreate {
 	return types.LocationCreate{
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 	}
 }
@@ -31,14 +31,14 @@ func TestLocationRepository_Get(t *testing.T) {
 func TestLocationRepositoryGetAllWithCount(t *testing.T) {
 	ctx := context.Background()
 	result, err := tRepos.Locations.Create(ctx, tGroup.ID, types.LocationCreate{
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 	})
 	assert.NoError(t, err)

 	_, err = tRepos.Items.Create(ctx, tGroup.ID, types.ItemCreate{
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 		LocationID:  result.ID,
 	})
@@ -74,8 +74,8 @@ func TestLocationRepository_Update(t *testing.T) {

 	updateData := types.LocationUpdate{
 		ID:          loc.ID,
-		Name:        fk.RandomString(10),
-		Description: fk.RandomString(100),
+		Name:        fk.Str(10),
+		Description: fk.Str(100),
 	}

 	update, err := tRepos.Locations.Update(context.Background(), updateData)
@@ -13,10 +13,10 @@ import (
 func userFactory() types.UserCreate {

 	return types.UserCreate{
-		Name:        fk.RandomString(10),
-		Email:       fk.RandomEmail(),
-		Password:    fk.RandomString(10),
-		IsSuperuser: fk.RandomBool(),
+		Name:        fk.Str(10),
+		Email:       fk.Email(),
+		Password:    fk.Str(10),
+		IsSuperuser: fk.Bool(),
 		GroupID:     tGroup.ID,
 	}
 }
@@ -109,8 +109,8 @@ func TestUserRepo_Update(t *testing.T) {
 	assert.NoError(t, err)

 	updateData := types.UserUpdate{
-		Name:  fk.RandomString(10),
-		Email: fk.RandomEmail(),
+		Name:  fk.Str(10),
+		Email: fk.Email(),
 	}

 	// Update
@@ -4,21 +4,27 @@ import "github.com/hay-kot/content/backend/ent"

 // AllRepos is a container for all the repository interfaces
 type AllRepos struct {
 	Users       *UserRepository
 	AuthTokens  *TokenRepository
 	Groups      *GroupRepository
 	Locations   *LocationRepository
 	Labels      *LabelRepository
 	Items       *ItemsRepository
+	Docs        *DocumentRepository
+	DocTokens   *DocumentTokensRepository
+	Attachments *AttachmentRepo
 }

 func EntAllRepos(db *ent.Client) *AllRepos {
 	return &AllRepos{
 		Users:       &UserRepository{db},
 		AuthTokens:  &TokenRepository{db},
 		Groups:      &GroupRepository{db},
 		Locations:   &LocationRepository{db},
 		Labels:      &LabelRepository{db},
 		Items:       &ItemsRepository{db},
+		Docs:        &DocumentRepository{db},
+		DocTokens:   &DocumentTokensRepository{db},
+		Attachments: &AttachmentRepo{db},
 	}
 }
@@ -16,6 +16,9 @@ func NewServices(repos *repo.AllRepos) *AllServices {
 		Admin:    &AdminService{repos},
 		Location: &LocationService{repos},
 		Labels:   &LabelService{repos},
-		Items:    &ItemService{repos},
+		Items: &ItemService{
+			repo:     repos,
+			filepath: "/tmp/content",
+		},
 	}
 }
@@ -22,6 +22,7 @@ var (
 	tRepos *repo.AllRepos
 	tUser  *ent.User
 	tGroup *ent.Group
+	tSvc   *AllServices
 )

 func bootstrap() {
@@ -36,10 +37,10 @@ func bootstrap() {
 	}

 	tUser, err = tRepos.Users.Create(ctx, types.UserCreate{
-		Name:        fk.RandomString(10),
-		Email:       fk.RandomEmail(),
-		Password:    fk.RandomString(10),
-		IsSuperuser: fk.RandomBool(),
+		Name:        fk.Str(10),
+		Email:       fk.Email(),
+		Password:    fk.Str(10),
+		IsSuperuser: fk.Bool(),
 		GroupID:     tGroup.ID,
 	})
 	if err != nil {
|
||||||
|
|
||||||
tClient = client
|
tClient = client
|
||||||
tRepos = repo.EntAllRepos(tClient)
|
tRepos = repo.EntAllRepos(tClient)
|
||||||
|
tSvc = NewServices(tRepos)
|
||||||
defer client.Close()
|
defer client.Close()
|
||||||
|
|
||||||
bootstrap()
|
bootstrap()
|
||||||
|
|
|
@@ -5,6 +5,19 @@ import (
 	"github.com/hay-kot/content/backend/internal/types"
 )

+func ToItemAttachment(attachment *ent.Attachment) *types.ItemAttachment {
+	return &types.ItemAttachment{
+		ID:        attachment.ID,
+		CreatedAt: attachment.CreatedAt,
+		UpdatedAt: attachment.UpdatedAt,
+		Document: types.DocumentOut{
+			ID:    attachment.Edges.Document.ID,
+			Title: attachment.Edges.Document.Title,
+			Path:  attachment.Edges.Document.Path,
+		},
+	}
+}
+
 func ToItemSummary(item *ent.Item) *types.ItemSummary {
 	var location *types.LocationSummary
 	if item.Edges.Location != nil {
@@ -23,6 +36,14 @@ func ToItemSummary(item *ent.Item) *types.ItemSummary {
 		CreatedAt: item.CreatedAt,
 		UpdatedAt: item.UpdatedAt,

+		Quantity: item.Quantity,
+		Insured:  item.Insured,
+
+		// Warranty
+		LifetimeWarranty: item.LifetimeWarranty,
+		WarrantyExpires:  item.WarrantyExpires,
+		WarrantyDetails:  item.WarrantyDetails,
+
 		// Edges
 		Location: location,
 		Labels:   labels,
@@ -53,8 +74,14 @@ func ToItemSummaryErr(item *ent.Item, err error) (*types.ItemSummary, error) {
 }

 func ToItemOut(item *ent.Item) *types.ItemOut {
+	var attachments []*types.ItemAttachment
+	if item.Edges.Attachments != nil {
+		attachments = MapEach(item.Edges.Attachments, ToItemAttachment)
+	}
+
 	return &types.ItemOut{
 		ItemSummary: *ToItemSummary(item),
+		Attachments: attachments,
 	}
 }
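MapEach itself is not shown in this diff; judging from the call site it is a small generic mapping helper along these lines (the exact signature is an assumption):

	// MapEach applies fn to each element and collects the results.
	func MapEach[T any, U any](items []T, fn func(T) U) []U {
		out := make([]U, len(items))
		for i, item := range items {
			out[i] = fn(item)
		}
		return out
	}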
@@ -3,8 +3,12 @@ package services

 import (
 	"context"
 	"fmt"
+	"io"
+	"os"
+	"path/filepath"

 	"github.com/google/uuid"
+	"github.com/hay-kot/content/backend/ent/attachment"
 	"github.com/hay-kot/content/backend/internal/repo"
 	"github.com/hay-kot/content/backend/internal/services/mappers"
 	"github.com/hay-kot/content/backend/internal/types"
|
||||||
|
|
||||||
type ItemService struct {
|
type ItemService struct {
|
||||||
repo *repo.AllRepos
|
repo *repo.AllRepos
|
||||||
|
|
||||||
|
// filepath is the root of the storage location that will be used to store all files from.
|
||||||
|
filepath string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (svc *ItemService) GetOne(ctx context.Context, gid uuid.UUID, id uuid.UUID) (*types.ItemOut, error) {
|
func (svc *ItemService) GetOne(ctx context.Context, gid uuid.UUID, id uuid.UUID) (*types.ItemOut, error) {
|
||||||
|
@@ -41,6 +48,7 @@ func (svc *ItemService) GetAll(ctx context.Context, gid uuid.UUID) ([]*types.ItemOut, error) {

 	return itemsOut, nil
 }
+
 func (svc *ItemService) Create(ctx context.Context, gid uuid.UUID, data types.ItemCreate) (*types.ItemOut, error) {
 	item, err := svc.repo.Items.Create(ctx, gid, data)
 	if err != nil {
@@ -49,6 +57,7 @@ func (svc *ItemService) Create(ctx context.Context, gid uuid.UUID, data types.ItemCreate) (*types.ItemOut, error) {

 	return mappers.ToItemOut(item), nil
 }
+
 func (svc *ItemService) Delete(ctx context.Context, gid uuid.UUID, id uuid.UUID) error {
 	item, err := svc.repo.Items.GetOne(ctx, id)
 	if err != nil {
@ -66,8 +75,76 @@ func (svc *ItemService) Delete(ctx context.Context, gid uuid.UUID, id uuid.UUID)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (svc *ItemService) Update(ctx context.Context, gid uuid.UUID, data types.ItemUpdate) (*types.ItemOut, error) {
|
func (svc *ItemService) Update(ctx context.Context, gid uuid.UUID, data types.ItemUpdate) (*types.ItemOut, error) {
|
||||||
panic("implement me")
|
item, err := svc.repo.Items.GetOne(ctx, data.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if item.Edges.Group.ID != gid {
|
||||||
|
return nil, ErrNotOwner
|
||||||
|
}
|
||||||
|
|
||||||
|
item, err = svc.repo.Items.Update(ctx, data)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return mappers.ToItemOut(item), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (svc *ItemService) attachmentPath(gid, itemId uuid.UUID, filename string) string {
|
||||||
|
return filepath.Join(svc.filepath, gid.String(), itemId.String(), filename)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAttachment adds an attachment to an item by creating an entry in the Documents table and linking it to the Attachment
|
||||||
|
// Table and Items table. The file provided via the reader is stored on the file system based on the provided
|
||||||
|
// relative path during construction of the service.
|
||||||
|
func (svc *ItemService) AddAttachment(ctx context.Context, gid, itemId uuid.UUID, filename string, file io.Reader) (*types.ItemOut, error) {
|
||||||
|
// Get the Item
|
||||||
|
item, err := svc.repo.Items.GetOne(ctx, itemId)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if item.Edges.Group.ID != gid {
|
||||||
|
return nil, ErrNotOwner
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the document
|
||||||
|
doc, err := svc.repo.Docs.Create(ctx, gid, types.DocumentCreate{
|
||||||
|
Title: filename,
|
||||||
|
Path: svc.attachmentPath(gid, itemId, filename),
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the attachment
|
||||||
|
_, err = svc.repo.Attachments.Create(ctx, itemId, doc.ID, attachment.TypeAttachment)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the contents and write them to a file on the file system
|
||||||
|
err = os.MkdirAll(filepath.Dir(doc.Path), os.ModePerm)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Create(doc.Path)
|
||||||
|
if err != nil {
|
||||||
|
log.Err(err).Msg("failed to create file")
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = io.Copy(f, file)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return svc.GetOne(ctx, gid, itemId)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data [][]string) error {
|
func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data [][]string) error {
|
||||||
|
|
|
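For context on how this service method might be consumed, here is a rough sketch of an upload handler. Everything except the `AddAttachment` call (which mirrors the signature above) is an assumption: the handler name, the multipart field name, and the ID sources are illustrative only, and the snippet presumes access to the services package types.

```go
package handlers

import (
	"encoding/json"
	"net/http"

	"github.com/google/uuid"
)

// Hypothetical handler sketch; only svc.AddAttachment mirrors the diff above.
func handleItemAttachmentUpload(svc *ItemService) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Assumed multipart form field named "file".
		file, header, err := r.FormFile("file")
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		defer file.Close()

		// Assumed: group and item IDs arrive as query parameters.
		gid, err := uuid.Parse(r.URL.Query().Get("group"))
		if err != nil {
			http.Error(w, "bad group id", http.StatusBadRequest)
			return
		}
		itemId, err := uuid.Parse(r.URL.Query().Get("item"))
		if err != nil {
			http.Error(w, "bad item id", http.StatusBadRequest)
			return
		}

		itemOut, err := svc.AddAttachment(r.Context(), gid, itemId, header.Filename, file)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(itemOut)
	}
}
```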
@@ -2,13 +2,16 @@ package services
 
 import (
 	"context"
+	"os"
+	"path"
+	"strings"
 	"testing"
 
+	"github.com/google/uuid"
+	"github.com/hay-kot/content/backend/internal/types"
 	"github.com/stretchr/testify/assert"
 )
 
 func TestItemService_CsvImport(t *testing.T) {
 	data := loadcsv()
 	svc := &ItemService{
@@ -55,6 +58,14 @@ func TestItemService_CsvImport(t *testing.T) {
 		labelNames = append(labelNames, label.Name)
 	}
 
+	ids := []uuid.UUID{}
+	t.Cleanup((func() {
+		for _, id := range ids {
+			err := svc.repo.Items.Delete(context.Background(), id)
+			assert.NoError(t, err)
+		}
+	}))
+
 	for _, item := range items {
 		assert.Contains(t, locNames, item.Location.Name)
 		for _, label := range item.Labels {
@@ -79,6 +90,55 @@ func TestItemService_CsvImport(t *testing.T) {
 			assert.Equal(t, csvRow.parsedSoldPrice(), item.SoldPrice)
 		}
 	}
 }
+
+func TestItemService_AddAttachment(t *testing.T) {
+	temp := os.TempDir()
+
+	svc := &ItemService{
+		repo:     tRepos,
+		filepath: temp,
+	}
+
+	loc, err := tSvc.Location.Create(context.Background(), tGroup.ID, types.LocationCreate{
+		Description: "test",
+		Name:        "test",
+	})
+	assert.NoError(t, err)
+	assert.NotNil(t, loc)
+
+	itmC := types.ItemCreate{
+		Name:        fk.Str(10),
+		Description: fk.Str(10),
+		LocationID:  loc.ID,
+	}
+
+	itm, err := svc.Create(context.Background(), tGroup.ID, itmC)
+	assert.NoError(t, err)
+	assert.NotNil(t, itm)
+	t.Cleanup(func() {
+		err := svc.repo.Items.Delete(context.Background(), itm.ID)
+		assert.NoError(t, err)
+	})
+
+	contents := fk.Str(1000)
+	reader := strings.NewReader(contents)
+
+	// Setup
+	afterAttachment, err := svc.AddAttachment(context.Background(), tGroup.ID, itm.ID, "testfile.txt", reader)
+	assert.NoError(t, err)
+	assert.NotNil(t, afterAttachment)
+
+	// Check that the file exists
+	storedPath := afterAttachment.Attachments[0].Document.Path
+
+	// {root}/{group}/{item}/{attachment}
+	assert.Equal(t, path.Join(temp, tGroup.ID.String(), itm.ID.String(), "testfile.txt"), storedPath)
+
+	// Check that the file contents are correct
+	bts, err := os.ReadFile(storedPath)
+	assert.NoError(t, err)
+	assert.Equal(t, contents, string(bts))
+}
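These tests register deletions with `t.Cleanup` rather than `defer`, so the teardown runs after the test (and any subtests) finish, in last-in-first-out order. A tiny illustration of that ordering; this is standard `testing` package behavior, not project code:

```go
func TestCleanupOrder(t *testing.T) {
	t.Cleanup(func() { t.Log("registered first, runs last") })
	t.Cleanup(func() { t.Log("registered last, runs first") })
	// Both callbacks run after the test body completes, in LIFO order.
}
```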
31
backend/internal/types/document_types.go
Normal file

@@ -0,0 +1,31 @@
+package types
+
+import (
+	"time"
+
+	"github.com/google/uuid"
+)
+
+type DocumentOut struct {
+	ID    uuid.UUID `json:"id"`
+	Title string    `json:"title"`
+	Path  string
+}
+
+type DocumentCreate struct {
+	Title string `json:"name"`
+	Path  string `json:"path"`
+}
+
+type DocumentUpdate = DocumentCreate
+
+type DocumentToken struct {
+	Raw       string    `json:"raw"`
+	ExpiresAt time.Time `json:"expiresAt"`
+}
+
+type DocumentTokenCreate struct {
+	TokenHash  []byte    `json:"tokenHash"`
+	DocumentID uuid.UUID `json:"documentId"`
+	ExpiresAt  time.Time `json:"expiresAt"`
+}
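`DocumentUpdate = DocumentCreate` is a type alias, so the two names refer to the same type everywhere, JSON tags included, and no conversion is needed when passing one where the other is expected. A quick standalone illustration (values here are illustrative):

```go
package main

import "fmt"

type DocumentCreate struct {
	Title string `json:"name"`
	Path  string `json:"path"`
}

// Alias: same type under a second name.
type DocumentUpdate = DocumentCreate

func main() {
	c := DocumentCreate{Title: "receipt.pdf", Path: "/docs/receipt.pdf"}
	var u DocumentUpdate = c // no conversion required
	fmt.Println(u.Title)
}
```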
@@ -19,6 +19,8 @@ type ItemUpdate struct {
 	ID          uuid.UUID `json:"id"`
 	Name        string    `json:"name"`
 	Description string    `json:"description"`
+	Quantity    int       `json:"quantity"`
+	Insured     bool      `json:"insured"`
 
 	// Edges
 	LocationID uuid.UUID `json:"locationId"`
@@ -37,12 +39,12 @@ type ItemUpdate struct {
 	// Purchase
 	PurchaseTime  time.Time `json:"purchaseTime"`
 	PurchaseFrom  string    `json:"purchaseFrom"`
-	PurchasePrice float64   `json:"purchasePrice"`
+	PurchasePrice float64   `json:"purchasePrice,string"`
 
 	// Sold
 	SoldTime  time.Time `json:"soldTime"`
 	SoldTo    string    `json:"soldTo"`
-	SoldPrice float64   `json:"soldPrice"`
+	SoldPrice float64   `json:"soldPrice,string"`
 	SoldNotes string    `json:"soldNotes"`
 
 	// Extras
@@ -56,6 +58,8 @@ type ItemSummary struct {
 	Description string    `json:"description"`
 	CreatedAt   time.Time `json:"createdAt"`
 	UpdatedAt   time.Time `json:"updatedAt"`
+	Quantity    int       `json:"quantity"`
+	Insured     bool      `json:"insured"`
 
 	// Edges
 	Location *LocationSummary `json:"location"`
@@ -74,12 +78,12 @@ type ItemSummary struct {
 	// Purchase
 	PurchaseTime  time.Time `json:"purchaseTime"`
 	PurchaseFrom  string    `json:"purchaseFrom"`
-	PurchasePrice float64   `json:"purchasePrice"`
+	PurchasePrice float64   `json:"purchasePrice,string"`
 
 	// Sold
 	SoldTime  time.Time `json:"soldTime"`
 	SoldTo    string    `json:"soldTo"`
-	SoldPrice float64   `json:"soldPrice"`
+	SoldPrice float64   `json:"soldPrice,string"`
 	SoldNotes string    `json:"soldNotes"`
 
 	// Extras
@@ -88,6 +92,14 @@ type ItemSummary struct {
 type ItemOut struct {
 	ItemSummary
+	Attachments []*ItemAttachment `json:"attachments"`
 	// Future
 	// Fields []*FieldSummary `json:"fields"`
 }
 
+type ItemAttachment struct {
+	ID        uuid.UUID   `json:"id"`
+	CreatedAt time.Time   `json:"createdAt"`
+	UpdatedAt time.Time   `json:"updatedAt"`
+	Document  DocumentOut `json:"document"`
+}
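The `,string` option added to the price tags tells `encoding/json` to marshal the float as a quoted string and to accept quoted input when unmarshalling, which avoids client-side float formatting surprises. A minimal round-trip showing the standard-library behavior:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type priced struct {
	PurchasePrice float64 `json:"purchasePrice,string"`
}

func main() {
	// Marshals the float as a JSON string.
	out, _ := json.Marshal(priced{PurchasePrice: 19.99})
	fmt.Println(string(out)) // {"purchasePrice":"19.99"}

	// Accepts quoted numeric input.
	var p priced
	_ = json.Unmarshal([]byte(`{"purchasePrice":"42.5"}`), &p)
	fmt.Println(p.PurchasePrice) // 42.5
}
```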
@@ -15,7 +15,11 @@ func NewFaker() {
 	return &Faker{}
 }
 
-func (f *Faker) RandomString(length int) string {
+func (f *Faker) Time() time.Time {
+	return time.Now().Add(time.Duration(f.Num(1, 100)) * time.Hour)
+}
+
+func (f *Faker) Str(length int) string {
 	b := make([]rune, length)
 	for i := range b {
@@ -24,14 +28,18 @@ func (f *Faker) RandomString(length int) string {
 	return string(b)
 }
 
-func (f *Faker) RandomEmail() string {
-	return f.RandomString(10) + "@email.com"
+func (f *Faker) Path() string {
+	return "/" + f.Str(10) + "/" + f.Str(10) + "/" + f.Str(10)
 }
 
-func (f *Faker) RandomBool() bool {
+func (f *Faker) Email() string {
+	return f.Str(10) + "@email.com"
+}
+
+func (f *Faker) Bool() bool {
 	return rand.Intn(2) == 1
 }
 
-func (f *Faker) RandomNumber(min, max int) int {
+func (f *Faker) Num(min, max int) int {
 	return rand.Intn(max-min) + min
 }
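A quick reference for the renamed faker API as exercised by the tests below; the comments describe the behavior visible in this hunk, and the half-open range on `Num` follows from `rand.Intn`:

```go
func exampleFakerUsage() {
	fk := NewFaker()
	_ = fk.Str(10)     // 10-rune random string
	_ = fk.Email()     // random 10-rune local part + "@email.com"
	_ = fk.Path()      // "/{10 runes}/{10 runes}/{10 runes}"
	_ = fk.Bool()      // fair coin flip
	_ = fk.Num(1, 100) // integer in [1, 100); max is exclusive
	_ = fk.Time()      // time.Now() plus f.Num(1, 100) hours
}
```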
@@ -25,7 +25,7 @@ func Test_GetRandomString(t *testing.T) {
 	faker := NewFaker()
 
 	for i := 0; i < Loops; i++ {
-		generated[i] = faker.RandomString(10)
+		generated[i] = faker.Str(10)
 	}
 
 	if !ValidateUnique(generated) {
@@ -41,7 +41,7 @@ func Test_GetRandomEmail(t *testing.T) {
 	faker := NewFaker()
 
 	for i := 0; i < Loops; i++ {
-		generated[i] = faker.RandomEmail()
+		generated[i] = faker.Email()
 	}
 
 	if !ValidateUnique(generated) {
@@ -58,7 +58,7 @@ func Test_GetRandomBool(t *testing.T) {
 	faker := NewFaker()
 
 	for i := 0; i < Loops; i++ {
-		if faker.RandomBool() {
+		if faker.Bool() {
 			trues++
 		} else {
 			falses++
@@ -81,7 +81,7 @@ func Test_RandomNumber(t *testing.T) {
 	last := MIN - 1
 
 	for i := 0; i < Loops; i++ {
-		n := f.RandomNumber(MIN, MAX)
+		n := f.Num(MIN, MAX)
 
 		if n == last {
 			t.Errorf("RandomNumber() failed to generate unique number")
@@ -9,7 +9,7 @@ import (
 // body is decoded into the provided value.
 func Decode(r *http.Request, val interface{}) error {
 	decoder := json.NewDecoder(r.Body)
-	decoder.DisallowUnknownFields()
+	// decoder.DisallowUnknownFields()
 	if err := decoder.Decode(val); err != nil {
 		return err
 	}
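Commenting out `DisallowUnknownFields` makes `Decode` tolerant: unknown JSON keys are now silently ignored instead of failing the request. The difference in standard-library behavior:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type itemPatch struct {
	Name string `json:"name"`
}

func main() {
	body := `{"name":"desk","extra":"surplus key"}`

	// Strict: unknown keys are an error.
	strict := json.NewDecoder(strings.NewReader(body))
	strict.DisallowUnknownFields()
	var a itemPatch
	fmt.Println(strict.Decode(&a)) // json: unknown field "extra"

	// Lenient (as in the diff above): unknown keys are ignored.
	lenient := json.NewDecoder(strings.NewReader(body))
	var b itemPatch
	fmt.Println(lenient.Decode(&b), b.Name) // <nil> desk
}
```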
@@ -49,7 +49,7 @@ func Test_ErrorBuilder_AddError(t *testing.T) {
 	errorStrings := make([]string, 10)
 
 	for i := 0; i < 10; i++ {
-		err := errors.New(f.RandomString(10))
+		err := errors.New(f.Str(10))
 		randomError[i] = err
 		errorStrings[i] = err.Error()
 	}
@@ -72,7 +72,7 @@ func Test_ErrorBuilder_Respond(t *testing.T) {
 	randomError := make([]error, 5)
 
 	for i := 0; i < 5; i++ {
-		err := errors.New(f.RandomString(5))
+		err := errors.New(f.Str(5))
 		randomError[i] = err
 	}
11
docs/docs/assets/img/favicon.svg
Normal file

@@ -0,0 +1,11 @@
+<svg viewBox="0 0 10817 9730" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:5.42683">
+<path d="M9310.16 2560.9c245.302 249.894 419.711 539.916 565.373 845.231 47.039 98.872 36.229 215.514-28.2 304.05-64.391 88.536-172.099 134.676-280.631 120.28 0 .053-.039.053-.039.053" style="fill:gray;stroke:#000;stroke-width:206.41px"/>
+<path d="M5401.56 487.044c-127.958 6.227-254.855 40.77-370.992 103.628-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43s-249.048 416.428-470.593 786.926c-189.24 316.445-592.833 429.831-919.198 258.219l-2699.36-1419.32v2215.59c0 226.273 128.751 435.33 337.755 548.466 764.649 413.885 2620.97 1418.66 3385.59 1832.51 209.018 113.137 466.496 113.137 675.514 0 764.623-413.857 2620.94-1418.63 3385.59-1832.51 208.989-113.136 337.743-322.193 337.743-548.466v-3513.48c0-318.684-174.59-611.722-454.853-763.409-795.543-430.632-2427.75-1314.09-3193.02-1728.32-141.693-76.684-299.364-111.227-455.442-103.628" style="fill:#dadada;stroke:#000;stroke-width:206.42px"/>
+<path d="M5471.83 4754.46V504.71c-127.958 6.226-325.127 23.1-441.264 85.958-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43Z" style="fill:gray;stroke:#000;stroke-width:206.42px"/>
+<path d="m1459.34 2725.96-373.791 715.667c-177.166 339.292-46.417 758 292.375 936.167l4.75 2.5m0 0 2699.37 1419.29c326.374 171.625 729.916 58.25 919.165-258.208 221.542-370.5 470.583-786.917 470.583-786.917l-3963.04-2122.42-2.167 3.458-47.25 90.458" style="fill:#dadada;stroke:#000;stroke-width:206.42px"/>
+<path d="M5443.74 520.879v4149.79" style="fill:none;stroke:#000;stroke-width:153.5px"/>
+<path d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59Z" style="fill:#567f67"/>
+<path d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59ZM6463.98 5551.29v1387.06l2301.77-1328.92V4222.37L6463.98 5551.29Z"/>
+<path d="M5443.76 9041.74v-4278.4" style="fill:none;stroke:#000;stroke-width:206.44px;stroke-linejoin:miter"/>
+<path d="m5471.79 4773.86 3829.35-2188.22" style="fill:none;stroke:#000;stroke-width:206.43px;stroke-linejoin:miter"/>
+</svg>

11
docs/docs/assets/img/lilbox.svg
Normal file

@@ -0,0 +1,11 @@
+<svg viewBox="0 0 10817 9730" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:5.42683">
+<path d="M9310.16 2560.9c245.302 249.894 419.711 539.916 565.373 845.231 47.039 98.872 36.229 215.514-28.2 304.05-64.391 88.536-172.099 134.676-280.631 120.28 0 .053-.039.053-.039.053" style="fill:gray;stroke:#000;stroke-width:206.41px"/>
+<path d="M5401.56 487.044c-127.958 6.227-254.855 40.77-370.992 103.628-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43s-249.048 416.428-470.593 786.926c-189.24 316.445-592.833 429.831-919.198 258.219l-2699.36-1419.32v2215.59c0 226.273 128.751 435.33 337.755 548.466 764.649 413.885 2620.97 1418.66 3385.59 1832.51 209.018 113.137 466.496 113.137 675.514 0 764.623-413.857 2620.94-1418.63 3385.59-1832.51 208.989-113.136 337.743-322.193 337.743-548.466v-3513.48c0-318.684-174.59-611.722-454.853-763.409-795.543-430.632-2427.75-1314.09-3193.02-1728.32-141.693-76.684-299.364-111.227-455.442-103.628" style="fill:#dadada;stroke:#000;stroke-width:206.42px"/>
+<path d="M5471.83 4754.46V504.71c-127.958 6.226-325.127 23.1-441.264 85.958-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43Z" style="fill:gray;stroke:#000;stroke-width:206.42px"/>
+<path d="m1459.34 2725.96-373.791 715.667c-177.166 339.292-46.417 758 292.375 936.167l4.75 2.5m0 0 2699.37 1419.29c326.374 171.625 729.916 58.25 919.165-258.208 221.542-370.5 470.583-786.917 470.583-786.917l-3963.04-2122.42-2.167 3.458-47.25 90.458" style="fill:#dadada;stroke:#000;stroke-width:206.42px"/>
+<path d="M5443.74 520.879v4149.79" style="fill:none;stroke:#000;stroke-width:153.5px"/>
+<path d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59Z" style="fill:#567f67"/>
+<path d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59ZM6463.98 5551.29v1387.06l2301.77-1328.92V4222.37L6463.98 5551.29Z"/>
+<path d="M5443.76 9041.74v-4278.4" style="fill:none;stroke:#000;stroke-width:206.44px;stroke-linejoin:miter"/>
+<path d="m5471.79 4773.86 3829.35-2188.22" style="fill:none;stroke:#000;stroke-width:206.43px;stroke-linejoin:miter"/>
+</svg>
@@ -1,4 +1,19 @@
-# Welcome to Homebox!
+<h1 align="center">
+  <br>
+  <img src="assets/img/lilbox.svg" width="200px">
+  <br>
+  Homebox
+  <br>
+</h1>
+<p align="center" style="width: 100; margin-top: -30px;">
+  <a href="https://hay-kot.github.io/homebox/">Docs</a>
+  |
+  <a href="https://homebox.fly.dev">Demo</a>
+  |
+  <a href="https://discord.gg/tuncmNrE4z">Discord</a>
+</p>
 
 Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project I've tried to keep the following principles in mind:
@@ -1,5 +1,5 @@
 site_name: Homebox
-site_url: https://hay-kot.github.io/homebox/
+# site_url: https://hay-kot.github.io/homebox/
 use_directory_urls: true
 theme:
   name: material
@@ -22,9 +22,8 @@ theme:
     - navigation.expand
     - navigation.sections
     - navigation.tabs.sticky
-  favicon: assets/img/favicon.png
-  icon:
-    logo: material/package-variant
+  favicon: assets/img/favicon.svg
+  logo: assets/img/favicon.svg
 
 extra_css:
   - assets/stylesheets/extras.css
8
frontend/.gitignore
vendored

@@ -1,8 +0,0 @@
-node_modules
-*.log*
-.nuxt
-.nitro
-.cache
-.output
-.env
-dist
@@ -1,42 +0,0 @@
-# Nuxt 3 Minimal Starter
-
-Look at the [nuxt 3 documentation](https://v3.nuxtjs.org) to learn more.
-
-## Setup
-
-Make sure to install the dependencies:
-
-```bash
-# yarn
-yarn install
-
-# npm
-npm install
-
-# pnpm
-pnpm install --shamefully-hoist
-```
-
-## Development Server
-
-Start the development server on http://localhost:3000
-
-```bash
-npm run dev
-```
-
-## Production
-
-Build the application for production:
-
-```bash
-npm run build
-```
-
-Locally preview production build:
-
-```bash
-npm run preview
-```
-
-Checkout the [deployment documentation](https://v3.nuxtjs.org/guide/deploy/presets) for more information.
|
@ -70,7 +70,7 @@
|
||||||
<BaseContainer>
|
<BaseContainer>
|
||||||
<h2 class="mt-1 text-4xl font-bold tracking-tight text-neutral-content sm:text-5xl lg:text-6xl flex">
|
<h2 class="mt-1 text-4xl font-bold tracking-tight text-neutral-content sm:text-5xl lg:text-6xl flex">
|
||||||
HomeB
|
HomeB
|
||||||
<AppLogo class="w-12 -mb-4" style="padding-left: 3px; padding-right: 2px" />
|
<AppLogo class="w-12 -mb-4" />
|
||||||
x
|
x
|
||||||
</h2>
|
</h2>
|
||||||
<div class="ml-1 mt-2 text-lg text-neutral-content/75 space-x-2">
|
<div class="ml-1 mt-2 text-lg text-neutral-content/75 space-x-2">
|
||||||
|
|
|
@@ -1,123 +1,47 @@
 <template>
-  <svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 596.5055138004384 585.369487986598">
-    <g
-      stroke-linecap="round"
-      transform="translate(437.568672588907 210.93877417794465) rotate(332.3235338946895 66.970006481548 27.559467997664797)"
-    >
-      <path
-        d="M-0.3 -0.89 L131.27 -1.27 L131.05 52.32 L-2.89 53.3"
-        stroke="none"
-        stroke-width="0"
-        fill="#15aabf"
-      ></path>
-      <path
-        d="M1.61 2.92 C34.39 0.43, 67.49 -3.76, 136.43 -0.16 M-1.81 1.81 C54.26 1.13, 105.28 -0.86, 133.28 -0.04 M132.66 3.06 C133.92 12.97, 132.16 31.97, 132.92 51.2 M134.28 1.16 C131.72 11.69, 133.56 23.47, 134.52 54.65 M133.93 53.09 C103.49 59.22, 80.28 58.51, -1.19 52.09 M133.03 54.88 C92.08 50.88, 53.71 52.46, -0.4 54.88 M-3.64 53.12 C-1.65 33.33, 3.49 15.58, -2.23 -1.93 M-1.08 54.38 C0.63 44.74, -0.42 33.82, 0.29 1.34"
-        stroke="#000"
-        stroke-width="2"
-        fill="none"
-      ></path>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(308.4481755172761 281.2115533662909) rotate(0 1.1385609918289674 145.9953857867422)">
-        <path
-          d="M-1.01 -2.17 C-1.07 46.71, 0.3 244.44, -0.46 294.16 M3.63 2.83 C3.44 50.71, -0.72 241.22, -1.36 289.41"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(308.16925883018916 284.66360015581995) rotate(0 135.1525798049602 -68.20042785962323)">
-        <path
-          d="M2.47 0.47 C46.8 -21.36, 220.33 -110.19, 264.96 -133.2 M0.37 -1.74 C45.62 -24.11, 225.01 -114.58, 269.93 -136.88"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(311.39372316987726 570.9674003164946) rotate(0 136.24116036890297 -67.43777376368234)">
-        <path
-          d="M-2.63 2.46 C20.4 -9.48, 94.34 -47.87, 140.63 -71.17 C186.92 -94.47, 252.17 -126.49, 275.11 -137.33 M1.14 1.33 C23.81 -10.35, 94.55 -46.04, 139.83 -68.58 C185.12 -91.11, 249.48 -121.76, 272.86 -133.89"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(580.6092831051336 150.72201134532952) rotate(0 1.5678417062894852 142.75141008423634)">
-        <path
-          d="M2.66 -1.91 C3.6 45.58, 2.41 239.01, 2.99 287.41 M0.67 3.23 C1.39 51.03, -0.51 235.96, 0.31 282.74"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(306.6976102947664 283.14653391715) rotate(0 -140.18354779216435 -59.60806644015338)">
-        <path
-          d="M-0.81 0.62 C-48.48 -18.36, -235.46 -96.23, -283.34 -115.75 M3.94 -1.52 C-44.13 -21.12, -236.56 -99.84, -284.31 -119.83"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(304.3414324224632 572.5226612839633) rotate(0 -144.27019052747903 -64.7761163684645)">
-        <path
-          d="M2.34 1.71 C-46 -19.52, -242.75 -105.04, -290.88 -126.78 M0.17 0.18 C-47.25 -21.98, -237.9 -110.2, -285.78 -131.27"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g
-        transform="translate(15.275892138818847 448.50738095516135) rotate(0 -0.49579063445983707 -143.71703352554232)"
-      >
-        <path
-          d="M-2.4 0.97 C-2.94 -47.38, -0.78 -240.15, -0.9 -288.41 M1.49 -0.97 C0.55 -49.09, -0.95 -237.81, -2.03 -285.33"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(10.301143858432795 164.72182108536072) rotate(0 142.35890827057267 -76.26873721417542)">
-        <path
-          d="M2.04 -1.02 C49.94 -26.43, 238.14 -126.5, 285.02 -151.52 M-0.3 -4.04 C47.43 -29.16, 234.44 -124.45, 282.68 -148.52"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(291.46813332015165 14.258444139957646) rotate(0 143.3244001532809 66.53622476241344)">
-        <path
-          d="M-0.18 -1.16 C46.98 21.36, 236.83 111.22, 284.98 134.14 M-3.72 -4.26 C44.15 18.77, 241.76 114.69, 290.37 137.33"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
-    <g stroke-linecap="round">
-      <g transform="translate(175.60844139934756 81.23280016017816) rotate(0 131.7777041676277 66.73388742398038)">
-        <path
-          d="M-1.87 -0.8 C42.4 22.26, 220.78 113.99, 265.42 137.17 M2.32 -3.7 C46.4 18.63, 220.35 109.95, 264.21 132.92"
-          stroke="#000000"
-          stroke-width="2"
-          fill="none"
-        ></path>
-      </g>
-    </g>
+  <svg
+    viewBox="0 0 10817 9730"
+    xmlns="http://www.w3.org/2000/svg"
+    xml:space="preserve"
+    style="
+      fill-rule: evenodd;
+      clip-rule: evenodd;
+      stroke-linecap: round;
+      stroke-linejoin: round;
+      stroke-miterlimit: 5.42683;
+    "
+  >
+    <path
+      d="M9310.16 2560.9c245.302 249.894 419.711 539.916 565.373 845.231 47.039 98.872 36.229 215.514-28.2 304.05-64.391 88.536-172.099 134.676-280.631 120.28 0 .053-.039.053-.039.053"
+      style="fill: gray; stroke: #000; stroke-width: 206.41px"
+    />
+    <path
+      d="M5401.56 487.044c-127.958 6.227-254.855 40.77-370.992 103.628-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43s-249.048 416.428-470.593 786.926c-189.24 316.445-592.833 429.831-919.198 258.219l-2699.36-1419.32v2215.59c0 226.273 128.751 435.33 337.755 548.466 764.649 413.885 2620.97 1418.66 3385.59 1832.51 209.018 113.137 466.496 113.137 675.514 0 764.623-413.857 2620.94-1418.63 3385.59-1832.51 208.989-113.136 337.743-322.193 337.743-548.466v-3513.48c0-318.684-174.59-611.722-454.853-763.409-795.543-430.632-2427.75-1314.09-3193.02-1728.32-141.693-76.684-299.364-111.227-455.442-103.628"
+      style="fill: #dadada; stroke: #000; stroke-width: 206.42px"
+    />
+    <path
+      d="M5471.83 4754.46V504.71c-127.958 6.226-325.127 23.1-441.264 85.958-765.271 414.225-2397.45 1297.68-3193.03 1728.32-137.966 74.669-250.327 183.605-328.791 313.046l3963.09 2122.43Z"
+      style="fill: gray; stroke: #000; stroke-width: 206.42px"
+    />
+    <path
+      d="m1459.34 2725.96-373.791 715.667c-177.166 339.292-46.417 758 292.375 936.167l4.75 2.5m0 0 2699.37 1419.29c326.374 171.625 729.916 58.25 919.165-258.208 221.542-370.5 470.583-786.917 470.583-786.917l-3963.04-2122.42-2.167 3.458-47.25 90.458"
+      style="fill: #dadada; stroke: #000; stroke-width: 206.42px"
+    />
+    <path d="M5443.74 520.879v4149.79" style="fill: none; stroke: #000; stroke-width: 153.5px" />
+    <path
+      d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59Z"
+      style="fill: #567f67"
+    />
+    <path
+      d="M8951.41 4102.72c0-41.65-22.221-80.136-58.291-100.961-36.069-20.825-80.51-20.825-116.58 0l-2439.92 1408.69c-36.07 20.825-58.29 59.311-58.29 100.961V7058c0 41.65 22.22 80.136 58.29 100.961 36.07 20.825 80.51 20.825 116.58 0l2439.92-1408.69c36.07-20.825 58.291-59.312 58.291-100.962v-1546.59ZM6463.98 5551.29v1387.06l2301.77-1328.92V4222.37L6463.98 5551.29Z"
+    />
+    <path
+      d="M5443.76 9041.74v-4278.4"
+      style="fill: none; stroke: #000; stroke-width: 206.44px; stroke-linejoin: miter"
+    />
+    <path
+      d="m5471.79 4773.86 3829.35-2188.22"
+      style="fill: none; stroke: #000; stroke-width: 206.43px; stroke-linejoin: miter"
+    />
   </svg>
 </template>
@@ -15,7 +15,7 @@
   {{ dKey }}
 </dt>
 <dd class="mt-1 text-sm text-gray-900 sm:col-span-2 sm:mt-0">
-  <slot :name="dKey" v-bind="{ key: dKey, value: dValue }">
+  <slot :name="rmSpace(dKey)" v-bind="{ key: dKey, value: dValue }">
     {{ dValue }}
   </slot>
 </dd>
@@ -28,8 +28,13 @@
 <script setup lang="ts">
 type StringLike = string | number | boolean;
+
+function rmSpace(str: string) {
+  return str.replace(" ", "");
+}
+
 defineProps({
   details: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     type: Object as () => Record<string, StringLike | any>,
     required: true,
   },
35
frontend/components/Form/Checkbox.vue
Normal file

@@ -0,0 +1,35 @@
+<template>
+  <div v-if="!inline" class="form-control w-full">
+    <label class="label cursor-pointer">
+      <span class="label-text"> {{ label }}</span>
+      <input v-model="value" type="checkbox" class="checkbox" />
+    </label>
+  </div>
+  <div v-else class="label cursor-pointer sm:grid sm:grid-cols-4 sm:items-start sm:gap-4">
+    <label>
+      <span class="label-text">
+        {{ label }}
+      </span>
+    </label>
+    <input v-model="value" type="checkbox" class="checkbox" />
+  </div>
+</template>
+
+<script setup lang="ts">
+const props = defineProps({
+  modelValue: {
+    type: Boolean,
+    default: false,
+  },
+  inline: {
+    type: Boolean,
+    default: false,
+  },
+  label: {
+    type: String,
+    default: "",
+  },
+});
+
+const value = useVModel(props, "modelValue");
+</script>
@@ -52,9 +52,14 @@
 
 const selected = useVModel(props, "modelValue", emit);
 const dateText = computed(() => {
+  if (!validDate(selected.value)) {
+    return "";
+  }
+
   if (selected.value) {
     return selected.value.toLocaleDateString();
   }
 
   return "";
 });
 
@@ -91,9 +96,7 @@
 });
 
 function select(e: MouseEvent, day: Date) {
-  console.log(day);
   selected.value = day;
-  console.log(selected.value);
   // @ts-ignore - this is a vue3 bug
   e.target.blur();
   resetTime();
@@ -17,7 +17,7 @@
   v-for="(obj, idx) in items"
   :key="idx"
   :class="{
-    bordered: selectedIndexes[idx],
+    bordered: selected[idx],
   }"
 >
   <button type="button" @click="toggle(idx)">
@@ -37,10 +37,12 @@
     default: "",
   },
   modelValue: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     type: Array as () => any[],
     default: null,
   },
   items: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    type: Array as () => any[],
     required: true,
   },
@@ -54,28 +56,23 @@
   },
 });
 
-const selectedIndexes = ref<Record<number, boolean>>({});
+const value = useVModel(props, "modelValue", emit);
+
+const selected = computed<Record<number, boolean>>(() => {
+  const obj: Record<number, boolean> = {};
+  value.value.forEach(itm => {
+    const idx = props.items.findIndex(item => item[props.name] === itm.name);
+    obj[idx] = true;
+  });
+  return obj;
+});
 
 function toggle(index: number) {
-  selectedIndexes.value[index] = !selectedIndexes.value[index];
-
   const item = props.items[index];
-  if (selectedIndexes.value[index]) {
-    value.value = [...value.value, item];
+  if (selected.value[index]) {
+    value.value = value.value.filter(itm => itm.name !== item.name);
   } else {
-    value.value = value.value.filter(itm => itm !== item);
+    value.value = [...value.value, item];
   }
 }
 
-watchOnce(
-  () => props.items,
-  () => {
-    if (props.selectFirst && props.items.length > 0) {
-      value.value = props.items[0];
-    }
-  }
-);
-
-const value = useVModel(props, "modelValue", emit);
 </script>
Some files were not shown because too many files have changed in this diff.