From 5bbb969763a585afdfcdecadd4a508f5afe0625d Mon Sep 17 00:00:00 2001
From: Hayden <64056131+hay-kot@users.noreply.github.com>
Date: Fri, 9 Dec 2022 20:57:57 -0900
Subject: [PATCH] feat: maintenance log (#170)

* remove repo for document tokens

* remove schema for doc tokens

* fix id template and generate cmd

* schema updates

* code gen

* bump dependencies

* fix broken migrations + add maintenance entry type

* spelling

* remove debug logger

* implement repository layer

* routes

* API client

* wip: maintenance log

* remove deprecated call
---
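
A quick sketch of how a client might exercise the new maintenance endpoints added in this patch. The request body mirrors the repo.MaintenanceEntryCreate definition introduced in the swagger spec below (cost is documented as a string, e.g. "0"); the host, port, API base path, item ID, token, and date format are placeholders, not values confirmed by this change.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// maintenanceEntryCreate mirrors the repo.MaintenanceEntryCreate shape from the
// swagger definitions added in this patch.
type maintenanceEntryCreate struct {
	Name        string `json:"name"`
	Date        string `json:"date"`
	Description string `json:"description"`
	Cost        string `json:"cost"`
}

func main() {
	// Placeholder host, item ID, and token -- adjust for a real deployment.
	base := "http://localhost:7745/api/v1"
	itemID := "00000000-0000-0000-0000-000000000000"

	payload, _ := json.Marshal(maintenanceEntryCreate{
		Name:        "Replace filter",
		Date:        "2022-12-09T00:00:00Z",
		Description: "Swapped the intake filter",
		Cost:        "12.50",
	})

	req, _ := http.NewRequest(http.MethodPost, base+"/items/"+itemID+"/maintenance", bytes.NewReader(payload))
	req.Header.Set("Authorization", "Bearer <token>")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("create status:", resp.Status) // the handler responds with 201 Created
}
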
 .../api/handlers/v1/v1_ctrl_maint_entry.go    |  125 ++
 backend/app/api/routes.go                     |    5 +
 backend/app/api/static/docs/docs.go           |  188 ++
 backend/app/api/static/docs/swagger.json      |  188 ++
 backend/app/api/static/docs/swagger.yaml      |  117 ++
 backend/app/tools/migrations/main.go          |    3 +
 backend/go.mod                                |   18 +-
 backend/go.sum                                |   36 +-
 backend/internal/data/ent/client.go           |  258 +--
 backend/internal/data/ent/config.go           |    2 +-
 backend/internal/data/ent/document.go         |   20 +-
 .../internal/data/ent/document/document.go    |    9 -
 backend/internal/data/ent/document/where.go   |   28 -
 backend/internal/data/ent/document_create.go  |   35 -
 backend/internal/data/ent/document_query.go   |  109 +-
 backend/internal/data/ent/document_update.go  |  181 --
 backend/internal/data/ent/documenttoken.go    |  190 --
 .../data/ent/documenttoken/documenttoken.go   |   85 -
 .../internal/data/ent/documenttoken/where.go  |  498 -----
 .../internal/data/ent/documenttoken_create.go |  398 ----
 .../internal/data/ent/documenttoken_delete.go |  115 --
 .../internal/data/ent/documenttoken_query.go  |  633 ------
 .../internal/data/ent/documenttoken_update.go |  542 ------
 backend/internal/data/ent/ent.go              |    4 +-
 backend/internal/data/ent/generate.go         |    2 +-
 backend/internal/data/ent/has_id.go           |   16 +-
 backend/internal/data/ent/hook/hook.go        |   26 +-
 backend/internal/data/ent/item.go             |   20 +-
 backend/internal/data/ent/item/item.go        |    9 +
 backend/internal/data/ent/item/where.go       |   28 +
 backend/internal/data/ent/item_create.go      |   35 +
 backend/internal/data/ent/item_query.go       |  125 +-
 backend/internal/data/ent/item_update.go      |  181 ++
 backend/internal/data/ent/maintenanceentry.go |  202 ++
 .../ent/maintenanceentry/maintenanceentry.go  |   82 +
 .../data/ent/maintenanceentry/where.go        |  696 +++++++
 .../data/ent/maintenanceentry_create.go       |  419 ++++
 .../data/ent/maintenanceentry_delete.go       |  115 ++
 .../data/ent/maintenanceentry_query.go        |  622 ++++++
 .../data/ent/maintenanceentry_update.go       |  594 ++++++
 backend/internal/data/ent/migrate/schema.go   |   62 +-
 backend/internal/data/ent/mutation.go         | 1706 +++++++++--------
 .../internal/data/ent/predicate/predicate.go  |    6 +-
 backend/internal/data/ent/runtime.go          |   82 +-
 .../internal/data/ent/schema/auth_tokens.go   |    6 +-
 backend/internal/data/ent/schema/document.go  |    4 -
 .../data/ent/schema/document_token.go         |   50 -
 backend/internal/data/ent/schema/item.go      |    4 +
 .../data/ent/schema/maintenance_entry.go      |   48 +
 .../data/ent/schema/templates/has_id.tmpl     |    7 +-
 backend/internal/data/ent/tx.go               |    6 +-
 .../internal/data/migrations/migrations.go    |    4 +-
 .../20221205230404_drop_document_tokens.sql   |    5 +
 ...20221205234214_add_maintenance_entries.sql |    2 +
 .../20221205234812_cascade_delete_roles.sql   |   16 +
 .../data/migrations/migrations/atlas.sum      |    5 +-
 backend/internal/data/repo/map_helpers.go     |   19 +-
 .../data/repo/repo_document_tokens.go         |   68 -
 .../data/repo/repo_document_tokens_test.go    |  150 --
 .../data/repo/repo_maintenance_entry.go       |  136 ++
 .../data/repo/repo_maintenance_entry_test.go  |   65 +
 backend/internal/data/repo/repos_all.go       |    4 +-
 backend/pkgs/hasher/password.go               |    2 +-
 frontend/components/Base/Button.vue           |    1 +
 frontend/components/Base/Card.vue             |    2 +-
 frontend/components/Form/DatePicker.vue       |    5 +-
 frontend/components/global/DateTime.vue       |   42 +-
 frontend/components/global/Markdown.vue       |    8 +-
 frontend/lib/api/__test__/user/items.test.ts  |   44 +-
 frontend/lib/api/classes/items.ts             |   94 +-
 frontend/lib/api/types/data-contracts.ts      |   62 +-
 frontend/nuxt.config.ts                       |   20 +-
 frontend/package.json                         |    4 +-
 frontend/pages/home.vue                       |    4 +-
 frontend/pages/item/[id]/edit.vue             |    6 +-
 frontend/pages/item/[id]/index.vue            |  148 +-
 frontend/pages/item/[id]/index/log.vue        |  173 ++
 frontend/pnpm-lock.yaml                       | 1247 ++++++------
 scripts/process-types.py                      |    1 +
 79 files changed, 6320 insertions(+), 4957 deletions(-)
 create mode 100644 backend/app/api/handlers/v1/v1_ctrl_maint_entry.go
 delete mode 100644 backend/internal/data/ent/documenttoken.go
 delete mode 100644 backend/internal/data/ent/documenttoken/documenttoken.go
 delete mode 100644 backend/internal/data/ent/documenttoken/where.go
 delete mode 100644 backend/internal/data/ent/documenttoken_create.go
 delete mode 100644 backend/internal/data/ent/documenttoken_delete.go
 delete mode 100644 backend/internal/data/ent/documenttoken_query.go
 delete mode 100644 backend/internal/data/ent/documenttoken_update.go
 create mode 100644 backend/internal/data/ent/maintenanceentry.go
 create mode 100644 backend/internal/data/ent/maintenanceentry/maintenanceentry.go
 create mode 100644 backend/internal/data/ent/maintenanceentry/where.go
 create mode 100644 backend/internal/data/ent/maintenanceentry_create.go
 create mode 100644 backend/internal/data/ent/maintenanceentry_delete.go
 create mode 100644 backend/internal/data/ent/maintenanceentry_query.go
 create mode 100644 backend/internal/data/ent/maintenanceentry_update.go
 delete mode 100644 backend/internal/data/ent/schema/document_token.go
 create mode 100644 backend/internal/data/ent/schema/maintenance_entry.go
 create mode 100644 backend/internal/data/migrations/migrations/20221205230404_drop_document_tokens.sql
 create mode 100644 backend/internal/data/migrations/migrations/20221205234214_add_maintenance_entries.sql
 create mode 100644 backend/internal/data/migrations/migrations/20221205234812_cascade_delete_roles.sql
 delete mode 100644 backend/internal/data/repo/repo_document_tokens.go
 delete mode 100644 backend/internal/data/repo/repo_document_tokens_test.go
 create mode 100644 backend/internal/data/repo/repo_maintenance_entry.go
 create mode 100644 backend/internal/data/repo/repo_maintenance_entry_test.go
 create mode 100644 frontend/pages/item/[id]/index/log.vue

diff --git a/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go b/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go
new file mode 100644
index 0000000..3f3f1a1
--- /dev/null
+++ b/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go
@@ -0,0 +1,125 @@
+package v1
+
+import (
+	"net/http"
+
+	"github.com/hay-kot/homebox/backend/internal/core/services"
+	"github.com/hay-kot/homebox/backend/internal/data/repo"
+	"github.com/hay-kot/homebox/backend/internal/sys/validate"
+	"github.com/hay-kot/homebox/backend/pkgs/server"
+	"github.com/rs/zerolog/log"
+)
+
+// HandleMaintenanceLogGet godoc
+// @Summary  Get Maintenance Log
+// @Tags     Maintenance
+// @Produce  json
+// @Success  200       {object} repo.MaintenanceLog
+// @Router   /v1/items/{id}/maintenance [GET]
+// @Security Bearer
+func (ctrl *V1Controller) HandleMaintenanceLogGet() server.HandlerFunc {
+	return ctrl.handleMaintenanceLog()
+}
+
+// HandleMaintenanceEntryCreate godoc
+// @Summary  Create Maintenance Entry
+// @Tags     Maintenance
+// @Produce  json
+// @Param    payload body     repo.MaintenanceEntryCreate true "Entry Data"
+// @Success  200     {object} repo.MaintenanceEntry
+// @Router   /v1/items/{id}/maintenance [POST]
+// @Security Bearer
+func (ctrl *V1Controller) HandleMaintenanceEntryCreate() server.HandlerFunc {
+	return ctrl.handleMaintenanceLog()
+}
+
+// HandleMaintenanceEntryDelete godoc
+// @Summary  Delete Maintenance Entry
+// @Tags     Maintenance
+// @Produce  json
+// @Success  204
+// @Router   /v1/items/{id}/maintenance/{entry_id} [DELETE]
+// @Security Bearer
+func (ctrl *V1Controller) HandleMaintenanceEntryDelete() server.HandlerFunc {
+	return ctrl.handleMaintenanceLog()
+}
+
+// HandleMaintenanceEntryUpdate godoc
+// @Summary  Update Maintenance Entry
+// @Tags     Maintenance
+// @Produce  json
+// @Param    payload body     repo.MaintenanceEntryUpdate true "Entry Data"
+// @Success  200     {object} repo.MaintenanceEntry
+// @Router   /v1/items/{id}/maintenance/{entry_id} [PUT]
+// @Security Bearer
+func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() server.HandlerFunc {
+	return ctrl.handleMaintenanceLog()
+}
+
+func (ctrl *V1Controller) handleMaintenanceLog() server.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) error {
+		ctx := services.NewContext(r.Context())
+		itemID, err := ctrl.routeID(r)
+		if err != nil {
+			return err
+		}
+
+		switch r.Method {
+		case http.MethodGet:
+			mlog, err := ctrl.repo.MaintEntry.GetLog(ctx, itemID)
+			if err != nil {
+				log.Err(err).Msg("failed to get items")
+				return validate.NewRequestError(err, http.StatusInternalServerError)
+			}
+			return server.Respond(w, http.StatusOK, mlog)
+		case http.MethodPost:
+			var create repo.MaintenanceEntryCreate
+			err := server.Decode(r, &create)
+			if err != nil {
+				return validate.NewRequestError(err, http.StatusBadRequest)
+			}
+
+			entry, err := ctrl.repo.MaintEntry.Create(ctx, itemID, create)
+			if err != nil {
+				log.Err(err).Msg("failed to create item")
+				return validate.NewRequestError(err, http.StatusInternalServerError)
+			}
+
+			return server.Respond(w, http.StatusCreated, entry)
+		case http.MethodPut:
+			entryID, err := ctrl.routeUUID(r, "entry_id")
+			if err != nil {
+				return err
+			}
+
+			var update repo.MaintenanceEntryUpdate
+			err = server.Decode(r, &update)
+			if err != nil {
+				return validate.NewRequestError(err, http.StatusBadRequest)
+			}
+
+			entry, err := ctrl.repo.MaintEntry.Update(ctx, entryID, update)
+			if err != nil {
+				log.Err(err).Msg("failed to update item")
+				return validate.NewRequestError(err, http.StatusInternalServerError)
+			}
+
+			return server.Respond(w, http.StatusOK, entry)
+		case http.MethodDelete:
+			entryID, err := ctrl.routeUUID(r, "entry_id")
+			if err != nil {
+				return err
+			}
+
+			err = ctrl.repo.MaintEntry.Delete(ctx, entryID)
+			if err != nil {
+				log.Err(err).Msg("failed to delete item")
+				return validate.NewRequestError(err, http.StatusInternalServerError)
+			}
+
+			return server.Respond(w, http.StatusNoContent, nil)
+		}
+
+		return nil
+	}
+}
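
All four exported handlers above delegate to a single handleMaintenanceLog closure that switches on r.Method; the separate exported methods exist so swag can attach one set of annotations per operation. A minimal standalone sketch of that dispatch pattern (plain net/http, illustrative names only, none of the project's server or repo types):

package main

import (
	"fmt"
	"net/http"
)

// maintenanceLog dispatches on the HTTP method, mirroring the shape of
// handleMaintenanceLog above without the project's packages.
func maintenanceLog() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		switch r.Method {
		case http.MethodGet:
			fmt.Fprintln(w, "return the maintenance log")
		case http.MethodPost:
			w.WriteHeader(http.StatusCreated)
			fmt.Fprintln(w, "create a maintenance entry")
		case http.MethodPut:
			fmt.Fprintln(w, "update a maintenance entry")
		case http.MethodDelete:
			w.WriteHeader(http.StatusNoContent)
		default:
			http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		}
	}
}

func main() {
	// One handler registered for the collection route; the real router also
	// mounts the {entry_id} route for PUT and DELETE.
	http.HandleFunc("/v1/items/maintenance", maintenanceLog())
	_ = http.ListenAndServe(":8080", nil)
}
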
diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go
index 1a42aeb..61e13d4 100644
--- a/backend/app/api/routes.go
+++ b/backend/app/api/routes.go
@@ -112,6 +112,11 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
 	a.server.Put(v1Base("/items/{id}/attachments/{attachment_id}"), v1Ctrl.HandleItemAttachmentUpdate(), userMW...)
 	a.server.Delete(v1Base("/items/{id}/attachments/{attachment_id}"), v1Ctrl.HandleItemAttachmentDelete(), userMW...)
 
+	a.server.Get(v1Base("/items/{id}/maintenance"), v1Ctrl.HandleMaintenanceEntryCreate(), userMW...)
+	a.server.Post(v1Base("/items/{id}/maintenance"), v1Ctrl.HandleMaintenanceEntryCreate(), userMW...)
+	a.server.Put(v1Base("/items/{id}/maintenance/{entry_id}"), v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...)
+	a.server.Delete(v1Base("/items/{id}/maintenance/{entry_id}"), v1Ctrl.HandleMaintenanceEntryDelete(), userMW...)
+
 	a.server.Get(
 		v1Base("/items/{id}/attachments/{attachment_id}"),
 		v1Ctrl.HandleItemAttachmentGet(),
diff --git a/backend/app/api/static/docs/docs.go b/backend/app/api/static/docs/docs.go
index 632b9e5..06cb95c 100644
--- a/backend/app/api/static/docs/docs.go
+++ b/backend/app/api/static/docs/docs.go
@@ -657,6 +657,117 @@ const docTemplate = `{
                 }
             }
         },
+        "/v1/items/{id}/maintenance": {
+            "get": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Get Maintenance Log",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceLog"
+                        }
+                    }
+                }
+            },
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Create Maintenance Entry",
+                "parameters": [
+                    {
+                        "description": "Entry Data",
+                        "name": "payload",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntryCreate"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntry"
+                        }
+                    }
+                }
+            }
+        },
+        "/v1/items/{id}/maintenance/{entry_id}": {
+            "put": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Update Maintenance Entry",
+                "parameters": [
+                    {
+                        "description": "Entry Data",
+                        "name": "payload",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntryUpdate"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntry"
+                        }
+                    }
+                }
+            },
+            "delete": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Delete Maintenance Entry",
+                "responses": {
+                    "204": {
+                        "description": "No Content"
+                    }
+                }
+            }
+        },
         "/v1/labels": {
             "get": {
                 "security": [
@@ -1825,6 +1936,83 @@ const docTemplate = `{
                 }
             }
         },
+        "repo.MaintenanceEntry": {
+            "type": "object",
+            "properties": {
+                "cost": {
+                    "type": "string",
+                    "example": "0"
+                },
+                "date": {
+                    "type": "string"
+                },
+                "description": {
+                    "type": "string"
+                },
+                "id": {
+                    "type": "string"
+                },
+                "name": {
+                    "type": "string"
+                }
+            }
+        },
+        "repo.MaintenanceEntryCreate": {
+            "type": "object",
+            "properties": {
+                "cost": {
+                    "type": "string",
+                    "example": "0"
+                },
+                "date": {
+                    "type": "string"
+                },
+                "description": {
+                    "type": "string"
+                },
+                "name": {
+                    "type": "string"
+                }
+            }
+        },
+        "repo.MaintenanceEntryUpdate": {
+            "type": "object",
+            "properties": {
+                "cost": {
+                    "type": "string",
+                    "example": "0"
+                },
+                "date": {
+                    "type": "string"
+                },
+                "description": {
+                    "type": "string"
+                },
+                "name": {
+                    "type": "string"
+                }
+            }
+        },
+        "repo.MaintenanceLog": {
+            "type": "object",
+            "properties": {
+                "costAverage": {
+                    "type": "number"
+                },
+                "costTotal": {
+                    "type": "number"
+                },
+                "entries": {
+                    "type": "array",
+                    "items": {
+                        "$ref": "#/definitions/repo.MaintenanceEntry"
+                    }
+                },
+                "itemId": {
+                    "type": "string"
+                }
+            }
+        },
         "repo.PaginationResult-repo_ItemSummary": {
             "type": "object",
             "properties": {
diff --git a/backend/app/api/static/docs/swagger.json b/backend/app/api/static/docs/swagger.json
index 69ba931..7e5ec85 100644
--- a/backend/app/api/static/docs/swagger.json
+++ b/backend/app/api/static/docs/swagger.json
@@ -649,6 +649,117 @@
                 }
             }
         },
+        "/v1/items/{id}/maintenance": {
+            "get": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Get Maintenance Log",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceLog"
+                        }
+                    }
+                }
+            },
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Create Maintenance Entry",
+                "parameters": [
+                    {
+                        "description": "Entry Data",
+                        "name": "payload",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntryCreate"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntry"
+                        }
+                    }
+                }
+            }
+        },
+        "/v1/items/{id}/maintenance/{entry_id}": {
+            "put": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Update Maintenance Entry",
+                "parameters": [
+                    {
+                        "description": "Entry Data",
+                        "name": "payload",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntryUpdate"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/repo.MaintenanceEntry"
+                        }
+                    }
+                }
+            },
+            "delete": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Maintenance"
+                ],
+                "summary": "Delete Maintenance Entry",
+                "responses": {
+                    "204": {
+                        "description": "No Content"
+                    }
+                }
+            }
+        },
         "/v1/labels": {
             "get": {
                 "security": [
@@ -1817,6 +1928,83 @@
                 }
             }
         },
+        "repo.MaintenanceEntry": {
+            "type": "object",
+            "properties": {
+                "cost": {
+                    "type": "string",
+                    "example": "0"
+                },
+                "date": {
+                    "type": "string"
+                },
+                "description": {
+                    "type": "string"
+                },
+                "id": {
+                    "type": "string"
+                },
+                "name": {
+                    "type": "string"
+                }
+            }
+        },
+        "repo.MaintenanceEntryCreate": {
+            "type": "object",
+            "properties": {
+                "cost": {
+                    "type": "string",
+                    "example": "0"
+                },
+                "date": {
+                    "type": "string"
+                },
+                "description": {
+                    "type": "string"
+                },
+                "name": {
+                    "type": "string"
+                }
+            }
+        },
+        "repo.MaintenanceEntryUpdate": {
+            "type": "object",
+            "properties": {
+                "cost": {
+                    "type": "string",
+                    "example": "0"
+                },
+                "date": {
+                    "type": "string"
+                },
+                "description": {
+                    "type": "string"
+                },
+                "name": {
+                    "type": "string"
+                }
+            }
+        },
+        "repo.MaintenanceLog": {
+            "type": "object",
+            "properties": {
+                "costAverage": {
+                    "type": "number"
+                },
+                "costTotal": {
+                    "type": "number"
+                },
+                "entries": {
+                    "type": "array",
+                    "items": {
+                        "$ref": "#/definitions/repo.MaintenanceEntry"
+                    }
+                },
+                "itemId": {
+                    "type": "string"
+                }
+            }
+        },
         "repo.PaginationResult-repo_ItemSummary": {
             "type": "object",
             "properties": {
diff --git a/backend/app/api/static/docs/swagger.yaml b/backend/app/api/static/docs/swagger.yaml
index 802fdf8..3d2fe2a 100644
--- a/backend/app/api/static/docs/swagger.yaml
+++ b/backend/app/api/static/docs/swagger.yaml
@@ -383,6 +383,57 @@ definitions:
         type: string
         x-nullable: true
     type: object
+  repo.MaintenanceEntry:
+    properties:
+      cost:
+        example: "0"
+        type: string
+      date:
+        type: string
+      description:
+        type: string
+      id:
+        type: string
+      name:
+        type: string
+    type: object
+  repo.MaintenanceEntryCreate:
+    properties:
+      cost:
+        example: "0"
+        type: string
+      date:
+        type: string
+      description:
+        type: string
+      name:
+        type: string
+    type: object
+  repo.MaintenanceEntryUpdate:
+    properties:
+      cost:
+        example: "0"
+        type: string
+      date:
+        type: string
+      description:
+        type: string
+      name:
+        type: string
+    type: object
+  repo.MaintenanceLog:
+    properties:
+      costAverage:
+        type: number
+      costTotal:
+        type: number
+      entries:
+        items:
+          $ref: '#/definitions/repo.MaintenanceEntry'
+        type: array
+      itemId:
+        type: string
+    type: object
   repo.PaginationResult-repo_ItemSummary:
     properties:
       items:
@@ -938,6 +989,72 @@ paths:
       summary: retrieves an attachment for an item
       tags:
       - Items Attachments
+  /v1/items/{id}/maintenance:
+    get:
+      produces:
+      - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/repo.MaintenanceLog'
+      security:
+      - Bearer: []
+      summary: Get Maintenance Log
+      tags:
+      - Maintenance
+    post:
+      parameters:
+      - description: Entry Data
+        in: body
+        name: payload
+        required: true
+        schema:
+          $ref: '#/definitions/repo.MaintenanceEntryCreate'
+      produces:
+      - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/repo.MaintenanceEntry'
+      security:
+      - Bearer: []
+      summary: Create Maintenance Entry
+      tags:
+      - Maintenance
+  /v1/items/{id}/maintenance/{entry_id}:
+    delete:
+      produces:
+      - application/json
+      responses:
+        "204":
+          description: No Content
+      security:
+      - Bearer: []
+      summary: Delete Maintenance Entry
+      tags:
+      - Maintenance
+    put:
+      parameters:
+      - description: Entry Data
+        in: body
+        name: payload
+        required: true
+        schema:
+          $ref: '#/definitions/repo.MaintenanceEntryUpdate'
+      produces:
+      - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/repo.MaintenanceEntry'
+      security:
+      - Bearer: []
+      summary: Update Maintenance Entry
+      tags:
+      - Maintenance
   /v1/items/import:
     post:
       parameters:
diff --git a/backend/app/tools/migrations/main.go b/backend/app/tools/migrations/main.go
index a2f6624..e53e7ba 100644
--- a/backend/app/tools/migrations/main.go
+++ b/backend/app/tools/migrations/main.go
@@ -2,6 +2,7 @@ package main
 
 import (
 	"context"
+	"fmt"
 	"log"
 	"os"
 
@@ -39,4 +40,6 @@ func main() {
 	if err != nil {
 		log.Fatalf("failed generating migration file: %v", err)
 	}
+
+	fmt.Println("Migration file generated successfully.")
 }
diff --git a/backend/go.mod b/backend/go.mod
index ab7e91b..d6eefce 100644
--- a/backend/go.mod
+++ b/backend/go.mod
@@ -3,7 +3,7 @@ module github.com/hay-kot/homebox/backend
 go 1.19
 
 require (
-	ariga.io/atlas v0.7.3-0.20221011160332-3ca609863edd
+	ariga.io/atlas v0.8.3
 	entgo.io/ent v0.11.4
 	github.com/ardanlabs/conf/v2 v2.2.0
 	github.com/go-chi/chi/v5 v5.0.7
@@ -14,7 +14,7 @@ require (
 	github.com/stretchr/testify v1.8.1
 	github.com/swaggo/http-swagger v1.3.3
 	github.com/swaggo/swag v1.8.8
-	golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90
+	golang.org/x/crypto v0.3.0
 )
 
 require (
@@ -30,7 +30,7 @@ require (
 	github.com/go-playground/locales v0.14.0 // indirect
 	github.com/go-playground/universal-translator v0.18.0 // indirect
 	github.com/google/go-cmp v0.5.9 // indirect
-	github.com/hashicorp/hcl/v2 v2.14.1 // indirect
+	github.com/hashicorp/hcl/v2 v2.15.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/leodido/go-urn v1.2.1 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
@@ -39,11 +39,11 @@ require (
 	github.com/mitchellh/go-wordwrap v1.0.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a // indirect
-	github.com/zclconf/go-cty v1.11.0 // indirect
-	golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
-	golang.org/x/net v0.0.0-20220923203811-8be639271d50 // indirect
-	golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8 // indirect
-	golang.org/x/text v0.3.7 // indirect
-	golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa // indirect
+	github.com/zclconf/go-cty v1.12.1 // indirect
+	golang.org/x/mod v0.7.0 // indirect
+	golang.org/x/net v0.2.0 // indirect
+	golang.org/x/sys v0.3.0 // indirect
+	golang.org/x/text v0.5.0 // indirect
+	golang.org/x/tools v0.3.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
diff --git a/backend/go.sum b/backend/go.sum
index 19d0437..16b6ffb 100644
--- a/backend/go.sum
+++ b/backend/go.sum
@@ -1,5 +1,5 @@
-ariga.io/atlas v0.7.3-0.20221011160332-3ca609863edd h1:c3F2jvvEZzsoH/KUpDNhTsCVeUPnpXaF8kADZvUSiU0=
-ariga.io/atlas v0.7.3-0.20221011160332-3ca609863edd/go.mod h1:ft47uSh5hWGDCmQC9DsztZg6Xk+KagM5Ts/mZYKb9JE=
+ariga.io/atlas v0.8.3 h1:nddOywkhr/62Cwa+UsGgO35lAhUYh52XGVsbFwGzWZM=
+ariga.io/atlas v0.8.3/go.mod h1:T230JFcENj4ZZzMkZrXFDSkv+2kXkUgpJ5FQQ5hMcKU=
 entgo.io/ent v0.11.4 h1:grwVY0fp31BZ6oEo3YrXenAuv8VJmEw7F/Bi6WqeH3Q=
 entgo.io/ent v0.11.4/go.mod h1:fnQIXL36RYnCk/9nvG4aE7YHBFZhCycfh7wMjY5p7SE=
 github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
@@ -7,7 +7,6 @@ github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc
 github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
 github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
 github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
-github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA=
 github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
 github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
 github.com/ardanlabs/conf/v2 v2.2.0 h1:ar1+TYIYAh2Tdeg2DQroh7ruR56/vJR8BDfzDIrXgtk=
@@ -47,8 +46,8 @@ github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
 github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
 github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/hashicorp/hcl/v2 v2.14.1 h1:x0BpjfZ+CYdbiz+8yZTQ+gdLO7IXvOut7Da+XJayx34=
-github.com/hashicorp/hcl/v2 v2.14.1/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0=
+github.com/hashicorp/hcl/v2 v2.15.0 h1:CPDXO6+uORPjKflkWCCwoWc9uRp+zSIPcCQ+BrxV7m8=
+github.com/hashicorp/hcl/v2 v2.15.0/go.mod h1:JRmR89jycNkrrqnMmvPDMd56n1rQJ2Q6KocSLCMCXng=
 github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
 github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@@ -109,17 +108,17 @@ github.com/swaggo/http-swagger v1.3.3 h1:Hu5Z0L9ssyBLofaama21iYaF2VbWyA8jdohaaCG
 github.com/swaggo/http-swagger v1.3.3/go.mod h1:sE+4PjD89IxMPm77FnkDz0sdO+p5lbXzrVWT6OTVVGo=
 github.com/swaggo/swag v1.8.8 h1:/GgJmrJ8/c0z4R4hoEPZ5UeEhVGdvsII4JbVDLbR7Xc=
 github.com/swaggo/swag v1.8.8/go.mod h1:ezQVUUhly8dludpVk+/PuwJWvLLanB13ygV5Pr9enSk=
-github.com/zclconf/go-cty v1.11.0 h1:726SxLdi2SDnjY+BStqB9J1hNp4+2WlzyXLuimibIe0=
-github.com/zclconf/go-cty v1.11.0/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA=
+github.com/zclconf/go-cty v1.12.1 h1:PcupnljUm9EIvbgSHQnHhUr3fO6oFmkOrvs2BAFNXXY=
+github.com/zclconf/go-cty v1.12.1/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA=
 golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 h1:Y/gsMcFOcR+6S6f3YeMKl5g+dZMEWqcz5Czj/GWYbkM=
-golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
-golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
+golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20220923203811-8be639271d50 h1:vKyz8L3zkd+xrMeIaBsQ/MNVPVFSffdaU3ZyYlBGFnI=
-golang.org/x/net v0.0.0-20220923203811-8be639271d50/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
+golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -127,15 +126,16 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8 h1:h+EGohizhe9XlX18rfpa8k8RAc5XyaeamM+0VHRd4lc=
-golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
+golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM=
+golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa h1:uKcci2q7Qtp6nMTC/AAvfNUAldFtJuHWV9/5QWiypts=
-golang.org/x/tools v0.1.13-0.20220804200503-81c7dc4e4efa/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.3.0 h1:SrNbZl6ECOS1qFzgTdQfWXZM9XBkiA6tkFrH9YSTPHM=
+golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/backend/internal/data/ent/client.go b/backend/internal/data/ent/client.go
index 67a57ef..eb66c20 100644
--- a/backend/internal/data/ent/client.go
+++ b/backend/internal/data/ent/client.go
@@ -15,13 +15,13 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/groupinvitationtoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/user"
 
 	"entgo.io/ent/dialect"
@@ -42,8 +42,6 @@ type Client struct {
 	AuthTokens *AuthTokensClient
 	// Document is the client for interacting with the Document builders.
 	Document *DocumentClient
-	// DocumentToken is the client for interacting with the DocumentToken builders.
-	DocumentToken *DocumentTokenClient
 	// Group is the client for interacting with the Group builders.
 	Group *GroupClient
 	// GroupInvitationToken is the client for interacting with the GroupInvitationToken builders.
@@ -56,6 +54,8 @@ type Client struct {
 	Label *LabelClient
 	// Location is the client for interacting with the Location builders.
 	Location *LocationClient
+	// MaintenanceEntry is the client for interacting with the MaintenanceEntry builders.
+	MaintenanceEntry *MaintenanceEntryClient
 	// User is the client for interacting with the User builders.
 	User *UserClient
 }
@@ -75,13 +75,13 @@ func (c *Client) init() {
 	c.AuthRoles = NewAuthRolesClient(c.config)
 	c.AuthTokens = NewAuthTokensClient(c.config)
 	c.Document = NewDocumentClient(c.config)
-	c.DocumentToken = NewDocumentTokenClient(c.config)
 	c.Group = NewGroupClient(c.config)
 	c.GroupInvitationToken = NewGroupInvitationTokenClient(c.config)
 	c.Item = NewItemClient(c.config)
 	c.ItemField = NewItemFieldClient(c.config)
 	c.Label = NewLabelClient(c.config)
 	c.Location = NewLocationClient(c.config)
+	c.MaintenanceEntry = NewMaintenanceEntryClient(c.config)
 	c.User = NewUserClient(c.config)
 }
 
@@ -120,13 +120,13 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
 		AuthRoles:            NewAuthRolesClient(cfg),
 		AuthTokens:           NewAuthTokensClient(cfg),
 		Document:             NewDocumentClient(cfg),
-		DocumentToken:        NewDocumentTokenClient(cfg),
 		Group:                NewGroupClient(cfg),
 		GroupInvitationToken: NewGroupInvitationTokenClient(cfg),
 		Item:                 NewItemClient(cfg),
 		ItemField:            NewItemFieldClient(cfg),
 		Label:                NewLabelClient(cfg),
 		Location:             NewLocationClient(cfg),
+		MaintenanceEntry:     NewMaintenanceEntryClient(cfg),
 		User:                 NewUserClient(cfg),
 	}, nil
 }
@@ -151,13 +151,13 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
 		AuthRoles:            NewAuthRolesClient(cfg),
 		AuthTokens:           NewAuthTokensClient(cfg),
 		Document:             NewDocumentClient(cfg),
-		DocumentToken:        NewDocumentTokenClient(cfg),
 		Group:                NewGroupClient(cfg),
 		GroupInvitationToken: NewGroupInvitationTokenClient(cfg),
 		Item:                 NewItemClient(cfg),
 		ItemField:            NewItemFieldClient(cfg),
 		Label:                NewLabelClient(cfg),
 		Location:             NewLocationClient(cfg),
+		MaintenanceEntry:     NewMaintenanceEntryClient(cfg),
 		User:                 NewUserClient(cfg),
 	}, nil
 }
@@ -191,13 +191,13 @@ func (c *Client) Use(hooks ...Hook) {
 	c.AuthRoles.Use(hooks...)
 	c.AuthTokens.Use(hooks...)
 	c.Document.Use(hooks...)
-	c.DocumentToken.Use(hooks...)
 	c.Group.Use(hooks...)
 	c.GroupInvitationToken.Use(hooks...)
 	c.Item.Use(hooks...)
 	c.ItemField.Use(hooks...)
 	c.Label.Use(hooks...)
 	c.Location.Use(hooks...)
+	c.MaintenanceEntry.Use(hooks...)
 	c.User.Use(hooks...)
 }
 
@@ -652,22 +652,6 @@ func (c *DocumentClient) QueryGroup(d *Document) *GroupQuery {
 	return query
 }
 
-// QueryDocumentTokens queries the document_tokens edge of a Document.
-func (c *DocumentClient) QueryDocumentTokens(d *Document) *DocumentTokenQuery {
-	query := &DocumentTokenQuery{config: c.config}
-	query.path = func(context.Context) (fromV *sql.Selector, _ error) {
-		id := d.ID
-		step := sqlgraph.NewStep(
-			sqlgraph.From(document.Table, document.FieldID, id),
-			sqlgraph.To(documenttoken.Table, documenttoken.FieldID),
-			sqlgraph.Edge(sqlgraph.O2M, false, document.DocumentTokensTable, document.DocumentTokensColumn),
-		)
-		fromV = sqlgraph.Neighbors(d.driver.Dialect(), step)
-		return fromV, nil
-	}
-	return query
-}
-
 // QueryAttachments queries the attachments edge of a Document.
 func (c *DocumentClient) QueryAttachments(d *Document) *AttachmentQuery {
 	query := &AttachmentQuery{config: c.config}
@@ -689,112 +673,6 @@ func (c *DocumentClient) Hooks() []Hook {
 	return c.hooks.Document
 }
 
-// DocumentTokenClient is a client for the DocumentToken schema.
-type DocumentTokenClient struct {
-	config
-}
-
-// NewDocumentTokenClient returns a client for the DocumentToken from the given config.
-func NewDocumentTokenClient(c config) *DocumentTokenClient {
-	return &DocumentTokenClient{config: c}
-}
-
-// Use adds a list of mutation hooks to the hooks stack.
-// A call to `Use(f, g, h)` equals to `documenttoken.Hooks(f(g(h())))`.
-func (c *DocumentTokenClient) Use(hooks ...Hook) {
-	c.hooks.DocumentToken = append(c.hooks.DocumentToken, hooks...)
-}
-
-// Create returns a builder for creating a DocumentToken entity.
-func (c *DocumentTokenClient) Create() *DocumentTokenCreate {
-	mutation := newDocumentTokenMutation(c.config, OpCreate)
-	return &DocumentTokenCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// CreateBulk returns a builder for creating a bulk of DocumentToken entities.
-func (c *DocumentTokenClient) CreateBulk(builders ...*DocumentTokenCreate) *DocumentTokenCreateBulk {
-	return &DocumentTokenCreateBulk{config: c.config, builders: builders}
-}
-
-// Update returns an update builder for DocumentToken.
-func (c *DocumentTokenClient) Update() *DocumentTokenUpdate {
-	mutation := newDocumentTokenMutation(c.config, OpUpdate)
-	return &DocumentTokenUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// UpdateOne returns an update builder for the given entity.
-func (c *DocumentTokenClient) UpdateOne(dt *DocumentToken) *DocumentTokenUpdateOne {
-	mutation := newDocumentTokenMutation(c.config, OpUpdateOne, withDocumentToken(dt))
-	return &DocumentTokenUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// UpdateOneID returns an update builder for the given id.
-func (c *DocumentTokenClient) UpdateOneID(id uuid.UUID) *DocumentTokenUpdateOne {
-	mutation := newDocumentTokenMutation(c.config, OpUpdateOne, withDocumentTokenID(id))
-	return &DocumentTokenUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// Delete returns a delete builder for DocumentToken.
-func (c *DocumentTokenClient) Delete() *DocumentTokenDelete {
-	mutation := newDocumentTokenMutation(c.config, OpDelete)
-	return &DocumentTokenDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// DeleteOne returns a builder for deleting the given entity.
-func (c *DocumentTokenClient) DeleteOne(dt *DocumentToken) *DocumentTokenDeleteOne {
-	return c.DeleteOneID(dt.ID)
-}
-
-// DeleteOneID returns a builder for deleting the given entity by its id.
-func (c *DocumentTokenClient) DeleteOneID(id uuid.UUID) *DocumentTokenDeleteOne {
-	builder := c.Delete().Where(documenttoken.ID(id))
-	builder.mutation.id = &id
-	builder.mutation.op = OpDeleteOne
-	return &DocumentTokenDeleteOne{builder}
-}
-
-// Query returns a query builder for DocumentToken.
-func (c *DocumentTokenClient) Query() *DocumentTokenQuery {
-	return &DocumentTokenQuery{
-		config: c.config,
-	}
-}
-
-// Get returns a DocumentToken entity by its id.
-func (c *DocumentTokenClient) Get(ctx context.Context, id uuid.UUID) (*DocumentToken, error) {
-	return c.Query().Where(documenttoken.ID(id)).Only(ctx)
-}
-
-// GetX is like Get, but panics if an error occurs.
-func (c *DocumentTokenClient) GetX(ctx context.Context, id uuid.UUID) *DocumentToken {
-	obj, err := c.Get(ctx, id)
-	if err != nil {
-		panic(err)
-	}
-	return obj
-}
-
-// QueryDocument queries the document edge of a DocumentToken.
-func (c *DocumentTokenClient) QueryDocument(dt *DocumentToken) *DocumentQuery {
-	query := &DocumentQuery{config: c.config}
-	query.path = func(context.Context) (fromV *sql.Selector, _ error) {
-		id := dt.ID
-		step := sqlgraph.NewStep(
-			sqlgraph.From(documenttoken.Table, documenttoken.FieldID, id),
-			sqlgraph.To(document.Table, document.FieldID),
-			sqlgraph.Edge(sqlgraph.M2O, true, documenttoken.DocumentTable, documenttoken.DocumentColumn),
-		)
-		fromV = sqlgraph.Neighbors(dt.driver.Dialect(), step)
-		return fromV, nil
-	}
-	return query
-}
-
-// Hooks returns the client hooks.
-func (c *DocumentTokenClient) Hooks() []Hook {
-	return c.hooks.DocumentToken
-}
-
 // GroupClient is a client for the Group schema.
 type GroupClient struct {
 	config
@@ -1268,6 +1146,22 @@ func (c *ItemClient) QueryFields(i *Item) *ItemFieldQuery {
 	return query
 }
 
+// QueryMaintenanceEntries queries the maintenance_entries edge of a Item.
+func (c *ItemClient) QueryMaintenanceEntries(i *Item) *MaintenanceEntryQuery {
+	query := &MaintenanceEntryQuery{config: c.config}
+	query.path = func(context.Context) (fromV *sql.Selector, _ error) {
+		id := i.ID
+		step := sqlgraph.NewStep(
+			sqlgraph.From(item.Table, item.FieldID, id),
+			sqlgraph.To(maintenanceentry.Table, maintenanceentry.FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, item.MaintenanceEntriesTable, item.MaintenanceEntriesColumn),
+		)
+		fromV = sqlgraph.Neighbors(i.driver.Dialect(), step)
+		return fromV, nil
+	}
+	return query
+}
+
 // QueryAttachments queries the attachments edge of a Item.
 func (c *ItemClient) QueryAttachments(i *Item) *AttachmentQuery {
 	query := &AttachmentQuery{config: c.config}
@@ -1671,6 +1565,112 @@ func (c *LocationClient) Hooks() []Hook {
 	return c.hooks.Location
 }
 
+// MaintenanceEntryClient is a client for the MaintenanceEntry schema.
+type MaintenanceEntryClient struct {
+	config
+}
+
+// NewMaintenanceEntryClient returns a client for the MaintenanceEntry from the given config.
+func NewMaintenanceEntryClient(c config) *MaintenanceEntryClient {
+	return &MaintenanceEntryClient{config: c}
+}
+
+// Use adds a list of mutation hooks to the hooks stack.
+// A call to `Use(f, g, h)` equals to `maintenanceentry.Hooks(f(g(h())))`.
+func (c *MaintenanceEntryClient) Use(hooks ...Hook) {
+	c.hooks.MaintenanceEntry = append(c.hooks.MaintenanceEntry, hooks...)
+}
+
+// Create returns a builder for creating a MaintenanceEntry entity.
+func (c *MaintenanceEntryClient) Create() *MaintenanceEntryCreate {
+	mutation := newMaintenanceEntryMutation(c.config, OpCreate)
+	return &MaintenanceEntryCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// CreateBulk returns a builder for creating a bulk of MaintenanceEntry entities.
+func (c *MaintenanceEntryClient) CreateBulk(builders ...*MaintenanceEntryCreate) *MaintenanceEntryCreateBulk {
+	return &MaintenanceEntryCreateBulk{config: c.config, builders: builders}
+}
+
+// Update returns an update builder for MaintenanceEntry.
+func (c *MaintenanceEntryClient) Update() *MaintenanceEntryUpdate {
+	mutation := newMaintenanceEntryMutation(c.config, OpUpdate)
+	return &MaintenanceEntryUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// UpdateOne returns an update builder for the given entity.
+func (c *MaintenanceEntryClient) UpdateOne(me *MaintenanceEntry) *MaintenanceEntryUpdateOne {
+	mutation := newMaintenanceEntryMutation(c.config, OpUpdateOne, withMaintenanceEntry(me))
+	return &MaintenanceEntryUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// UpdateOneID returns an update builder for the given id.
+func (c *MaintenanceEntryClient) UpdateOneID(id uuid.UUID) *MaintenanceEntryUpdateOne {
+	mutation := newMaintenanceEntryMutation(c.config, OpUpdateOne, withMaintenanceEntryID(id))
+	return &MaintenanceEntryUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// Delete returns a delete builder for MaintenanceEntry.
+func (c *MaintenanceEntryClient) Delete() *MaintenanceEntryDelete {
+	mutation := newMaintenanceEntryMutation(c.config, OpDelete)
+	return &MaintenanceEntryDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// DeleteOne returns a builder for deleting the given entity.
+func (c *MaintenanceEntryClient) DeleteOne(me *MaintenanceEntry) *MaintenanceEntryDeleteOne {
+	return c.DeleteOneID(me.ID)
+}
+
+// DeleteOneID returns a builder for deleting the given entity by its id.
+func (c *MaintenanceEntryClient) DeleteOneID(id uuid.UUID) *MaintenanceEntryDeleteOne {
+	builder := c.Delete().Where(maintenanceentry.ID(id))
+	builder.mutation.id = &id
+	builder.mutation.op = OpDeleteOne
+	return &MaintenanceEntryDeleteOne{builder}
+}
+
+// Query returns a query builder for MaintenanceEntry.
+func (c *MaintenanceEntryClient) Query() *MaintenanceEntryQuery {
+	return &MaintenanceEntryQuery{
+		config: c.config,
+	}
+}
+
+// Get returns a MaintenanceEntry entity by its id.
+func (c *MaintenanceEntryClient) Get(ctx context.Context, id uuid.UUID) (*MaintenanceEntry, error) {
+	return c.Query().Where(maintenanceentry.ID(id)).Only(ctx)
+}
+
+// GetX is like Get, but panics if an error occurs.
+func (c *MaintenanceEntryClient) GetX(ctx context.Context, id uuid.UUID) *MaintenanceEntry {
+	obj, err := c.Get(ctx, id)
+	if err != nil {
+		panic(err)
+	}
+	return obj
+}
+
+// QueryItem queries the item edge of a MaintenanceEntry.
+func (c *MaintenanceEntryClient) QueryItem(me *MaintenanceEntry) *ItemQuery {
+	query := &ItemQuery{config: c.config}
+	query.path = func(context.Context) (fromV *sql.Selector, _ error) {
+		id := me.ID
+		step := sqlgraph.NewStep(
+			sqlgraph.From(maintenanceentry.Table, maintenanceentry.FieldID, id),
+			sqlgraph.To(item.Table, item.FieldID),
+			sqlgraph.Edge(sqlgraph.M2O, true, maintenanceentry.ItemTable, maintenanceentry.ItemColumn),
+		)
+		fromV = sqlgraph.Neighbors(me.driver.Dialect(), step)
+		return fromV, nil
+	}
+	return query
+}
+
+// Hooks returns the client hooks.
+func (c *MaintenanceEntryClient) Hooks() []Hook {
+	return c.hooks.MaintenanceEntry
+}
+
 // UserClient is a client for the User schema.
 type UserClient struct {
 	config
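
For context, the generated MaintenanceEntryClient follows ent's usual builder pattern. Below is a minimal usage sketch; the sqlite driver, DSN, and the Set* field names (name/date/description) are assumptions inferred from the swagger definitions in this patch rather than read from the schema file, and a real entry would also need its owning Item edge set.

package main

import (
	"context"
	"log"
	"time"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
	_ "github.com/mattn/go-sqlite3"
)

func main() {
	// Open an in-memory sqlite database and create the schema (sketch only).
	client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	ctx := context.Background()
	if err := client.Schema.Create(ctx); err != nil {
		log.Fatal(err)
	}

	// Field setters assume the schema mirrors the swagger definitions above;
	// the required item edge is omitted here, so Save may return an error.
	_, err = client.MaintenanceEntry.Create().
		SetName("Replace filter").
		SetDate(time.Now()).
		SetDescription("Swapped the intake filter").
		Save(ctx)
	if err != nil {
		log.Println("create failed (expected without an owning item):", err)
	}
}
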
diff --git a/backend/internal/data/ent/config.go b/backend/internal/data/ent/config.go
index e3ce09d..9cba253 100644
--- a/backend/internal/data/ent/config.go
+++ b/backend/internal/data/ent/config.go
@@ -28,13 +28,13 @@ type hooks struct {
 	AuthRoles            []ent.Hook
 	AuthTokens           []ent.Hook
 	Document             []ent.Hook
-	DocumentToken        []ent.Hook
 	Group                []ent.Hook
 	GroupInvitationToken []ent.Hook
 	Item                 []ent.Hook
 	ItemField            []ent.Hook
 	Label                []ent.Hook
 	Location             []ent.Hook
+	MaintenanceEntry     []ent.Hook
 	User                 []ent.Hook
 }
 
diff --git a/backend/internal/data/ent/document.go b/backend/internal/data/ent/document.go
index 0c84d7d..50c1612 100644
--- a/backend/internal/data/ent/document.go
+++ b/backend/internal/data/ent/document.go
@@ -36,13 +36,11 @@ type Document struct {
 type DocumentEdges struct {
 	// Group holds the value of the group edge.
 	Group *Group `json:"group,omitempty"`
-	// DocumentTokens holds the value of the document_tokens edge.
-	DocumentTokens []*DocumentToken `json:"document_tokens,omitempty"`
 	// Attachments holds the value of the attachments edge.
 	Attachments []*Attachment `json:"attachments,omitempty"`
 	// loadedTypes holds the information for reporting if a
 	// type was loaded (or requested) in eager-loading or not.
-	loadedTypes [3]bool
+	loadedTypes [2]bool
 }
 
 // GroupOrErr returns the Group value or an error if the edge
@@ -58,19 +56,10 @@ func (e DocumentEdges) GroupOrErr() (*Group, error) {
 	return nil, &NotLoadedError{edge: "group"}
 }
 
-// DocumentTokensOrErr returns the DocumentTokens value or an error if the edge
-// was not loaded in eager-loading.
-func (e DocumentEdges) DocumentTokensOrErr() ([]*DocumentToken, error) {
-	if e.loadedTypes[1] {
-		return e.DocumentTokens, nil
-	}
-	return nil, &NotLoadedError{edge: "document_tokens"}
-}
-
 // AttachmentsOrErr returns the Attachments value or an error if the edge
 // was not loaded in eager-loading.
 func (e DocumentEdges) AttachmentsOrErr() ([]*Attachment, error) {
-	if e.loadedTypes[2] {
+	if e.loadedTypes[1] {
 		return e.Attachments, nil
 	}
 	return nil, &NotLoadedError{edge: "attachments"}
@@ -151,11 +140,6 @@ func (d *Document) QueryGroup() *GroupQuery {
 	return (&DocumentClient{config: d.config}).QueryGroup(d)
 }
 
-// QueryDocumentTokens queries the "document_tokens" edge of the Document entity.
-func (d *Document) QueryDocumentTokens() *DocumentTokenQuery {
-	return (&DocumentClient{config: d.config}).QueryDocumentTokens(d)
-}
-
 // QueryAttachments queries the "attachments" edge of the Document entity.
 func (d *Document) QueryAttachments() *AttachmentQuery {
 	return (&DocumentClient{config: d.config}).QueryAttachments(d)
diff --git a/backend/internal/data/ent/document/document.go b/backend/internal/data/ent/document/document.go
index bfc3881..b6a15eb 100644
--- a/backend/internal/data/ent/document/document.go
+++ b/backend/internal/data/ent/document/document.go
@@ -23,8 +23,6 @@ const (
 	FieldPath = "path"
 	// EdgeGroup holds the string denoting the group edge name in mutations.
 	EdgeGroup = "group"
-	// EdgeDocumentTokens holds the string denoting the document_tokens edge name in mutations.
-	EdgeDocumentTokens = "document_tokens"
 	// EdgeAttachments holds the string denoting the attachments edge name in mutations.
 	EdgeAttachments = "attachments"
 	// Table holds the table name of the document in the database.
@@ -36,13 +34,6 @@ const (
 	GroupInverseTable = "groups"
 	// GroupColumn is the table column denoting the group relation/edge.
 	GroupColumn = "group_documents"
-	// DocumentTokensTable is the table that holds the document_tokens relation/edge.
-	DocumentTokensTable = "document_tokens"
-	// DocumentTokensInverseTable is the table name for the DocumentToken entity.
-	// It exists in this package in order to avoid circular dependency with the "documenttoken" package.
-	DocumentTokensInverseTable = "document_tokens"
-	// DocumentTokensColumn is the table column denoting the document_tokens relation/edge.
-	DocumentTokensColumn = "document_document_tokens"
 	// AttachmentsTable is the table that holds the attachments relation/edge.
 	AttachmentsTable = "attachments"
 	// AttachmentsInverseTable is the table name for the Attachment entity.
diff --git a/backend/internal/data/ent/document/where.go b/backend/internal/data/ent/document/where.go
index dc02fa4..6f1bd69 100644
--- a/backend/internal/data/ent/document/where.go
+++ b/backend/internal/data/ent/document/where.go
@@ -464,34 +464,6 @@ func HasGroupWith(preds ...predicate.Group) predicate.Document {
 	})
 }
 
-// HasDocumentTokens applies the HasEdge predicate on the "document_tokens" edge.
-func HasDocumentTokens() predicate.Document {
-	return predicate.Document(func(s *sql.Selector) {
-		step := sqlgraph.NewStep(
-			sqlgraph.From(Table, FieldID),
-			sqlgraph.To(DocumentTokensTable, FieldID),
-			sqlgraph.Edge(sqlgraph.O2M, false, DocumentTokensTable, DocumentTokensColumn),
-		)
-		sqlgraph.HasNeighbors(s, step)
-	})
-}
-
-// HasDocumentTokensWith applies the HasEdge predicate on the "document_tokens" edge with a given conditions (other predicates).
-func HasDocumentTokensWith(preds ...predicate.DocumentToken) predicate.Document {
-	return predicate.Document(func(s *sql.Selector) {
-		step := sqlgraph.NewStep(
-			sqlgraph.From(Table, FieldID),
-			sqlgraph.To(DocumentTokensInverseTable, FieldID),
-			sqlgraph.Edge(sqlgraph.O2M, false, DocumentTokensTable, DocumentTokensColumn),
-		)
-		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
-			for _, p := range preds {
-				p(s)
-			}
-		})
-	})
-}
-
 // HasAttachments applies the HasEdge predicate on the "attachments" edge.
 func HasAttachments() predicate.Document {
 	return predicate.Document(func(s *sql.Selector) {
diff --git a/backend/internal/data/ent/document_create.go b/backend/internal/data/ent/document_create.go
index b3969d6..67b5baa 100644
--- a/backend/internal/data/ent/document_create.go
+++ b/backend/internal/data/ent/document_create.go
@@ -13,7 +13,6 @@ import (
 	"github.com/google/uuid"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 )
 
@@ -89,21 +88,6 @@ func (dc *DocumentCreate) SetGroup(g *Group) *DocumentCreate {
 	return dc.SetGroupID(g.ID)
 }
 
-// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by IDs.
-func (dc *DocumentCreate) AddDocumentTokenIDs(ids ...uuid.UUID) *DocumentCreate {
-	dc.mutation.AddDocumentTokenIDs(ids...)
-	return dc
-}
-
-// AddDocumentTokens adds the "document_tokens" edges to the DocumentToken entity.
-func (dc *DocumentCreate) AddDocumentTokens(d ...*DocumentToken) *DocumentCreate {
-	ids := make([]uuid.UUID, len(d))
-	for i := range d {
-		ids[i] = d[i].ID
-	}
-	return dc.AddDocumentTokenIDs(ids...)
-}
-
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
 func (dc *DocumentCreate) AddAttachmentIDs(ids ...uuid.UUID) *DocumentCreate {
 	dc.mutation.AddAttachmentIDs(ids...)
@@ -309,25 +293,6 @@ func (dc *DocumentCreate) createSpec() (*Document, *sqlgraph.CreateSpec) {
 		_node.group_documents = &nodes[0]
 		_spec.Edges = append(_spec.Edges, edge)
 	}
-	if nodes := dc.mutation.DocumentTokensIDs(); len(nodes) > 0 {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges = append(_spec.Edges, edge)
-	}
 	if nodes := dc.mutation.AttachmentsIDs(); len(nodes) > 0 {
 		edge := &sqlgraph.EdgeSpec{
 			Rel:     sqlgraph.O2M,
diff --git a/backend/internal/data/ent/document_query.go b/backend/internal/data/ent/document_query.go
index 496de53..5ef4f67 100644
--- a/backend/internal/data/ent/document_query.go
+++ b/backend/internal/data/ent/document_query.go
@@ -14,7 +14,6 @@ import (
 	"github.com/google/uuid"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
 )
@@ -22,16 +21,15 @@ import (
 // DocumentQuery is the builder for querying Document entities.
 type DocumentQuery struct {
 	config
-	limit              *int
-	offset             *int
-	unique             *bool
-	order              []OrderFunc
-	fields             []string
-	predicates         []predicate.Document
-	withGroup          *GroupQuery
-	withDocumentTokens *DocumentTokenQuery
-	withAttachments    *AttachmentQuery
-	withFKs            bool
+	limit           *int
+	offset          *int
+	unique          *bool
+	order           []OrderFunc
+	fields          []string
+	predicates      []predicate.Document
+	withGroup       *GroupQuery
+	withAttachments *AttachmentQuery
+	withFKs         bool
 	// intermediate query (i.e. traversal path).
 	sql  *sql.Selector
 	path func(context.Context) (*sql.Selector, error)
@@ -90,28 +88,6 @@ func (dq *DocumentQuery) QueryGroup() *GroupQuery {
 	return query
 }
 
-// QueryDocumentTokens chains the current query on the "document_tokens" edge.
-func (dq *DocumentQuery) QueryDocumentTokens() *DocumentTokenQuery {
-	query := &DocumentTokenQuery{config: dq.config}
-	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
-		if err := dq.prepareQuery(ctx); err != nil {
-			return nil, err
-		}
-		selector := dq.sqlQuery(ctx)
-		if err := selector.Err(); err != nil {
-			return nil, err
-		}
-		step := sqlgraph.NewStep(
-			sqlgraph.From(document.Table, document.FieldID, selector),
-			sqlgraph.To(documenttoken.Table, documenttoken.FieldID),
-			sqlgraph.Edge(sqlgraph.O2M, false, document.DocumentTokensTable, document.DocumentTokensColumn),
-		)
-		fromU = sqlgraph.SetNeighbors(dq.driver.Dialect(), step)
-		return fromU, nil
-	}
-	return query
-}
-
 // QueryAttachments chains the current query on the "attachments" edge.
 func (dq *DocumentQuery) QueryAttachments() *AttachmentQuery {
 	query := &AttachmentQuery{config: dq.config}
@@ -310,14 +286,13 @@ func (dq *DocumentQuery) Clone() *DocumentQuery {
 		return nil
 	}
 	return &DocumentQuery{
-		config:             dq.config,
-		limit:              dq.limit,
-		offset:             dq.offset,
-		order:              append([]OrderFunc{}, dq.order...),
-		predicates:         append([]predicate.Document{}, dq.predicates...),
-		withGroup:          dq.withGroup.Clone(),
-		withDocumentTokens: dq.withDocumentTokens.Clone(),
-		withAttachments:    dq.withAttachments.Clone(),
+		config:          dq.config,
+		limit:           dq.limit,
+		offset:          dq.offset,
+		order:           append([]OrderFunc{}, dq.order...),
+		predicates:      append([]predicate.Document{}, dq.predicates...),
+		withGroup:       dq.withGroup.Clone(),
+		withAttachments: dq.withAttachments.Clone(),
 		// clone intermediate query.
 		sql:    dq.sql.Clone(),
 		path:   dq.path,
@@ -336,17 +311,6 @@ func (dq *DocumentQuery) WithGroup(opts ...func(*GroupQuery)) *DocumentQuery {
 	return dq
 }
 
-// WithDocumentTokens tells the query-builder to eager-load the nodes that are connected to
-// the "document_tokens" edge. The optional arguments are used to configure the query builder of the edge.
-func (dq *DocumentQuery) WithDocumentTokens(opts ...func(*DocumentTokenQuery)) *DocumentQuery {
-	query := &DocumentTokenQuery{config: dq.config}
-	for _, opt := range opts {
-		opt(query)
-	}
-	dq.withDocumentTokens = query
-	return dq
-}
-
 // WithAttachments tells the query-builder to eager-load the nodes that are connected to
 // the "attachments" edge. The optional arguments are used to configure the query builder of the edge.
 func (dq *DocumentQuery) WithAttachments(opts ...func(*AttachmentQuery)) *DocumentQuery {
@@ -432,9 +396,8 @@ func (dq *DocumentQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Doc
 		nodes       = []*Document{}
 		withFKs     = dq.withFKs
 		_spec       = dq.querySpec()
-		loadedTypes = [3]bool{
+		loadedTypes = [2]bool{
 			dq.withGroup != nil,
-			dq.withDocumentTokens != nil,
 			dq.withAttachments != nil,
 		}
 	)
@@ -468,13 +431,6 @@ func (dq *DocumentQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Doc
 			return nil, err
 		}
 	}
-	if query := dq.withDocumentTokens; query != nil {
-		if err := dq.loadDocumentTokens(ctx, query, nodes,
-			func(n *Document) { n.Edges.DocumentTokens = []*DocumentToken{} },
-			func(n *Document, e *DocumentToken) { n.Edges.DocumentTokens = append(n.Edges.DocumentTokens, e) }); err != nil {
-			return nil, err
-		}
-	}
 	if query := dq.withAttachments; query != nil {
 		if err := dq.loadAttachments(ctx, query, nodes,
 			func(n *Document) { n.Edges.Attachments = []*Attachment{} },
@@ -514,37 +470,6 @@ func (dq *DocumentQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes
 	}
 	return nil
 }
-func (dq *DocumentQuery) loadDocumentTokens(ctx context.Context, query *DocumentTokenQuery, nodes []*Document, init func(*Document), assign func(*Document, *DocumentToken)) error {
-	fks := make([]driver.Value, 0, len(nodes))
-	nodeids := make(map[uuid.UUID]*Document)
-	for i := range nodes {
-		fks = append(fks, nodes[i].ID)
-		nodeids[nodes[i].ID] = nodes[i]
-		if init != nil {
-			init(nodes[i])
-		}
-	}
-	query.withFKs = true
-	query.Where(predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.InValues(document.DocumentTokensColumn, fks...))
-	}))
-	neighbors, err := query.All(ctx)
-	if err != nil {
-		return err
-	}
-	for _, n := range neighbors {
-		fk := n.document_document_tokens
-		if fk == nil {
-			return fmt.Errorf(`foreign-key "document_document_tokens" is nil for node %v`, n.ID)
-		}
-		node, ok := nodeids[*fk]
-		if !ok {
-			return fmt.Errorf(`unexpected foreign-key "document_document_tokens" returned %v for node %v`, *fk, n.ID)
-		}
-		assign(node, n)
-	}
-	return nil
-}
 func (dq *DocumentQuery) loadAttachments(ctx context.Context, query *AttachmentQuery, nodes []*Document, init func(*Document), assign func(*Document, *Attachment)) error {
 	fks := make([]driver.Value, 0, len(nodes))
 	nodeids := make(map[uuid.UUID]*Document)
diff --git a/backend/internal/data/ent/document_update.go b/backend/internal/data/ent/document_update.go
index 880df95..4a7ae7e 100644
--- a/backend/internal/data/ent/document_update.go
+++ b/backend/internal/data/ent/document_update.go
@@ -14,7 +14,6 @@ import (
 	"github.com/google/uuid"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
 )
@@ -61,21 +60,6 @@ func (du *DocumentUpdate) SetGroup(g *Group) *DocumentUpdate {
 	return du.SetGroupID(g.ID)
 }
 
-// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by IDs.
-func (du *DocumentUpdate) AddDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdate {
-	du.mutation.AddDocumentTokenIDs(ids...)
-	return du
-}
-
-// AddDocumentTokens adds the "document_tokens" edges to the DocumentToken entity.
-func (du *DocumentUpdate) AddDocumentTokens(d ...*DocumentToken) *DocumentUpdate {
-	ids := make([]uuid.UUID, len(d))
-	for i := range d {
-		ids[i] = d[i].ID
-	}
-	return du.AddDocumentTokenIDs(ids...)
-}
-
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
 func (du *DocumentUpdate) AddAttachmentIDs(ids ...uuid.UUID) *DocumentUpdate {
 	du.mutation.AddAttachmentIDs(ids...)
@@ -102,27 +86,6 @@ func (du *DocumentUpdate) ClearGroup() *DocumentUpdate {
 	return du
 }
 
-// ClearDocumentTokens clears all "document_tokens" edges to the DocumentToken entity.
-func (du *DocumentUpdate) ClearDocumentTokens() *DocumentUpdate {
-	du.mutation.ClearDocumentTokens()
-	return du
-}
-
-// RemoveDocumentTokenIDs removes the "document_tokens" edge to DocumentToken entities by IDs.
-func (du *DocumentUpdate) RemoveDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdate {
-	du.mutation.RemoveDocumentTokenIDs(ids...)
-	return du
-}
-
-// RemoveDocumentTokens removes "document_tokens" edges to DocumentToken entities.
-func (du *DocumentUpdate) RemoveDocumentTokens(d ...*DocumentToken) *DocumentUpdate {
-	ids := make([]uuid.UUID, len(d))
-	for i := range d {
-		ids[i] = d[i].ID
-	}
-	return du.RemoveDocumentTokenIDs(ids...)
-}
-
 // ClearAttachments clears all "attachments" edges to the Attachment entity.
 func (du *DocumentUpdate) ClearAttachments() *DocumentUpdate {
 	du.mutation.ClearAttachments()
@@ -293,60 +256,6 @@ func (du *DocumentUpdate) sqlSave(ctx context.Context) (n int, err error) {
 		}
 		_spec.Edges.Add = append(_spec.Edges.Add, edge)
 	}
-	if du.mutation.DocumentTokensCleared() {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
-	}
-	if nodes := du.mutation.RemovedDocumentTokensIDs(); len(nodes) > 0 && !du.mutation.DocumentTokensCleared() {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
-	}
-	if nodes := du.mutation.DocumentTokensIDs(); len(nodes) > 0 {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges.Add = append(_spec.Edges.Add, edge)
-	}
 	if du.mutation.AttachmentsCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel:     sqlgraph.O2M,
@@ -449,21 +358,6 @@ func (duo *DocumentUpdateOne) SetGroup(g *Group) *DocumentUpdateOne {
 	return duo.SetGroupID(g.ID)
 }
 
-// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by IDs.
-func (duo *DocumentUpdateOne) AddDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdateOne {
-	duo.mutation.AddDocumentTokenIDs(ids...)
-	return duo
-}
-
-// AddDocumentTokens adds the "document_tokens" edges to the DocumentToken entity.
-func (duo *DocumentUpdateOne) AddDocumentTokens(d ...*DocumentToken) *DocumentUpdateOne {
-	ids := make([]uuid.UUID, len(d))
-	for i := range d {
-		ids[i] = d[i].ID
-	}
-	return duo.AddDocumentTokenIDs(ids...)
-}
-
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
 func (duo *DocumentUpdateOne) AddAttachmentIDs(ids ...uuid.UUID) *DocumentUpdateOne {
 	duo.mutation.AddAttachmentIDs(ids...)
@@ -490,27 +384,6 @@ func (duo *DocumentUpdateOne) ClearGroup() *DocumentUpdateOne {
 	return duo
 }
 
-// ClearDocumentTokens clears all "document_tokens" edges to the DocumentToken entity.
-func (duo *DocumentUpdateOne) ClearDocumentTokens() *DocumentUpdateOne {
-	duo.mutation.ClearDocumentTokens()
-	return duo
-}
-
-// RemoveDocumentTokenIDs removes the "document_tokens" edge to DocumentToken entities by IDs.
-func (duo *DocumentUpdateOne) RemoveDocumentTokenIDs(ids ...uuid.UUID) *DocumentUpdateOne {
-	duo.mutation.RemoveDocumentTokenIDs(ids...)
-	return duo
-}
-
-// RemoveDocumentTokens removes "document_tokens" edges to DocumentToken entities.
-func (duo *DocumentUpdateOne) RemoveDocumentTokens(d ...*DocumentToken) *DocumentUpdateOne {
-	ids := make([]uuid.UUID, len(d))
-	for i := range d {
-		ids[i] = d[i].ID
-	}
-	return duo.RemoveDocumentTokenIDs(ids...)
-}
-
 // ClearAttachments clears all "attachments" edges to the Attachment entity.
 func (duo *DocumentUpdateOne) ClearAttachments() *DocumentUpdateOne {
 	duo.mutation.ClearAttachments()
@@ -711,60 +584,6 @@ func (duo *DocumentUpdateOne) sqlSave(ctx context.Context) (_node *Document, err
 		}
 		_spec.Edges.Add = append(_spec.Edges.Add, edge)
 	}
-	if duo.mutation.DocumentTokensCleared() {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
-	}
-	if nodes := duo.mutation.RemovedDocumentTokensIDs(); len(nodes) > 0 && !duo.mutation.DocumentTokensCleared() {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
-	}
-	if nodes := duo.mutation.DocumentTokensIDs(); len(nodes) > 0 {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.O2M,
-			Inverse: false,
-			Table:   document.DocumentTokensTable,
-			Columns: []string{document.DocumentTokensColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: documenttoken.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges.Add = append(_spec.Edges.Add, edge)
-	}
 	if duo.mutation.AttachmentsCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel:     sqlgraph.O2M,
diff --git a/backend/internal/data/ent/documenttoken.go b/backend/internal/data/ent/documenttoken.go
deleted file mode 100644
index c484a9e..0000000
--- a/backend/internal/data/ent/documenttoken.go
+++ /dev/null
@@ -1,190 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package ent
-
-import (
-	"fmt"
-	"strings"
-	"time"
-
-	"entgo.io/ent/dialect/sql"
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-)
-
-// DocumentToken is the model entity for the DocumentToken schema.
-type DocumentToken struct {
-	config `json:"-"`
-	// ID of the ent.
-	ID uuid.UUID `json:"id,omitempty"`
-	// CreatedAt holds the value of the "created_at" field.
-	CreatedAt time.Time `json:"created_at,omitempty"`
-	// UpdatedAt holds the value of the "updated_at" field.
-	UpdatedAt time.Time `json:"updated_at,omitempty"`
-	// Token holds the value of the "token" field.
-	Token []byte `json:"token,omitempty"`
-	// Uses holds the value of the "uses" field.
-	Uses int `json:"uses,omitempty"`
-	// ExpiresAt holds the value of the "expires_at" field.
-	ExpiresAt time.Time `json:"expires_at,omitempty"`
-	// Edges holds the relations/edges for other nodes in the graph.
-	// The values are being populated by the DocumentTokenQuery when eager-loading is set.
-	Edges                    DocumentTokenEdges `json:"edges"`
-	document_document_tokens *uuid.UUID
-}
-
-// DocumentTokenEdges holds the relations/edges for other nodes in the graph.
-type DocumentTokenEdges struct {
-	// Document holds the value of the document edge.
-	Document *Document `json:"document,omitempty"`
-	// loadedTypes holds the information for reporting if a
-	// type was loaded (or requested) in eager-loading or not.
-	loadedTypes [1]bool
-}
-
-// DocumentOrErr returns the Document value or an error if the edge
-// was not loaded in eager-loading, or loaded but was not found.
-func (e DocumentTokenEdges) DocumentOrErr() (*Document, error) {
-	if e.loadedTypes[0] {
-		if e.Document == nil {
-			// Edge was loaded but was not found.
-			return nil, &NotFoundError{label: document.Label}
-		}
-		return e.Document, nil
-	}
-	return nil, &NotLoadedError{edge: "document"}
-}
-
-// scanValues returns the types for scanning values from sql.Rows.
-func (*DocumentToken) scanValues(columns []string) ([]any, error) {
-	values := make([]any, len(columns))
-	for i := range columns {
-		switch columns[i] {
-		case documenttoken.FieldToken:
-			values[i] = new([]byte)
-		case documenttoken.FieldUses:
-			values[i] = new(sql.NullInt64)
-		case documenttoken.FieldCreatedAt, documenttoken.FieldUpdatedAt, documenttoken.FieldExpiresAt:
-			values[i] = new(sql.NullTime)
-		case documenttoken.FieldID:
-			values[i] = new(uuid.UUID)
-		case documenttoken.ForeignKeys[0]: // document_document_tokens
-			values[i] = &sql.NullScanner{S: new(uuid.UUID)}
-		default:
-			return nil, fmt.Errorf("unexpected column %q for type DocumentToken", columns[i])
-		}
-	}
-	return values, nil
-}
-
-// assignValues assigns the values that were returned from sql.Rows (after scanning)
-// to the DocumentToken fields.
-func (dt *DocumentToken) assignValues(columns []string, values []any) error {
-	if m, n := len(values), len(columns); m < n {
-		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
-	}
-	for i := range columns {
-		switch columns[i] {
-		case documenttoken.FieldID:
-			if value, ok := values[i].(*uuid.UUID); !ok {
-				return fmt.Errorf("unexpected type %T for field id", values[i])
-			} else if value != nil {
-				dt.ID = *value
-			}
-		case documenttoken.FieldCreatedAt:
-			if value, ok := values[i].(*sql.NullTime); !ok {
-				return fmt.Errorf("unexpected type %T for field created_at", values[i])
-			} else if value.Valid {
-				dt.CreatedAt = value.Time
-			}
-		case documenttoken.FieldUpdatedAt:
-			if value, ok := values[i].(*sql.NullTime); !ok {
-				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
-			} else if value.Valid {
-				dt.UpdatedAt = value.Time
-			}
-		case documenttoken.FieldToken:
-			if value, ok := values[i].(*[]byte); !ok {
-				return fmt.Errorf("unexpected type %T for field token", values[i])
-			} else if value != nil {
-				dt.Token = *value
-			}
-		case documenttoken.FieldUses:
-			if value, ok := values[i].(*sql.NullInt64); !ok {
-				return fmt.Errorf("unexpected type %T for field uses", values[i])
-			} else if value.Valid {
-				dt.Uses = int(value.Int64)
-			}
-		case documenttoken.FieldExpiresAt:
-			if value, ok := values[i].(*sql.NullTime); !ok {
-				return fmt.Errorf("unexpected type %T for field expires_at", values[i])
-			} else if value.Valid {
-				dt.ExpiresAt = value.Time
-			}
-		case documenttoken.ForeignKeys[0]:
-			if value, ok := values[i].(*sql.NullScanner); !ok {
-				return fmt.Errorf("unexpected type %T for field document_document_tokens", values[i])
-			} else if value.Valid {
-				dt.document_document_tokens = new(uuid.UUID)
-				*dt.document_document_tokens = *value.S.(*uuid.UUID)
-			}
-		}
-	}
-	return nil
-}
-
-// QueryDocument queries the "document" edge of the DocumentToken entity.
-func (dt *DocumentToken) QueryDocument() *DocumentQuery {
-	return (&DocumentTokenClient{config: dt.config}).QueryDocument(dt)
-}
-
-// Update returns a builder for updating this DocumentToken.
-// Note that you need to call DocumentToken.Unwrap() before calling this method if this DocumentToken
-// was returned from a transaction, and the transaction was committed or rolled back.
-func (dt *DocumentToken) Update() *DocumentTokenUpdateOne {
-	return (&DocumentTokenClient{config: dt.config}).UpdateOne(dt)
-}
-
-// Unwrap unwraps the DocumentToken entity that was returned from a transaction after it was closed,
-// so that all future queries will be executed through the driver which created the transaction.
-func (dt *DocumentToken) Unwrap() *DocumentToken {
-	_tx, ok := dt.config.driver.(*txDriver)
-	if !ok {
-		panic("ent: DocumentToken is not a transactional entity")
-	}
-	dt.config.driver = _tx.drv
-	return dt
-}
-
-// String implements the fmt.Stringer.
-func (dt *DocumentToken) String() string {
-	var builder strings.Builder
-	builder.WriteString("DocumentToken(")
-	builder.WriteString(fmt.Sprintf("id=%v, ", dt.ID))
-	builder.WriteString("created_at=")
-	builder.WriteString(dt.CreatedAt.Format(time.ANSIC))
-	builder.WriteString(", ")
-	builder.WriteString("updated_at=")
-	builder.WriteString(dt.UpdatedAt.Format(time.ANSIC))
-	builder.WriteString(", ")
-	builder.WriteString("token=")
-	builder.WriteString(fmt.Sprintf("%v", dt.Token))
-	builder.WriteString(", ")
-	builder.WriteString("uses=")
-	builder.WriteString(fmt.Sprintf("%v", dt.Uses))
-	builder.WriteString(", ")
-	builder.WriteString("expires_at=")
-	builder.WriteString(dt.ExpiresAt.Format(time.ANSIC))
-	builder.WriteByte(')')
-	return builder.String()
-}
-
-// DocumentTokens is a parsable slice of DocumentToken.
-type DocumentTokens []*DocumentToken
-
-func (dt DocumentTokens) config(cfg config) {
-	for _i := range dt {
-		dt[_i].config = cfg
-	}
-}
diff --git a/backend/internal/data/ent/documenttoken/documenttoken.go b/backend/internal/data/ent/documenttoken/documenttoken.go
deleted file mode 100644
index ce05656..0000000
--- a/backend/internal/data/ent/documenttoken/documenttoken.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package documenttoken
-
-import (
-	"time"
-
-	"github.com/google/uuid"
-)
-
-const (
-	// Label holds the string label denoting the documenttoken type in the database.
-	Label = "document_token"
-	// FieldID holds the string denoting the id field in the database.
-	FieldID = "id"
-	// FieldCreatedAt holds the string denoting the created_at field in the database.
-	FieldCreatedAt = "created_at"
-	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
-	FieldUpdatedAt = "updated_at"
-	// FieldToken holds the string denoting the token field in the database.
-	FieldToken = "token"
-	// FieldUses holds the string denoting the uses field in the database.
-	FieldUses = "uses"
-	// FieldExpiresAt holds the string denoting the expires_at field in the database.
-	FieldExpiresAt = "expires_at"
-	// EdgeDocument holds the string denoting the document edge name in mutations.
-	EdgeDocument = "document"
-	// Table holds the table name of the documenttoken in the database.
-	Table = "document_tokens"
-	// DocumentTable is the table that holds the document relation/edge.
-	DocumentTable = "document_tokens"
-	// DocumentInverseTable is the table name for the Document entity.
-	// It exists in this package in order to avoid circular dependency with the "document" package.
-	DocumentInverseTable = "documents"
-	// DocumentColumn is the table column denoting the document relation/edge.
-	DocumentColumn = "document_document_tokens"
-)
-
-// Columns holds all SQL columns for documenttoken fields.
-var Columns = []string{
-	FieldID,
-	FieldCreatedAt,
-	FieldUpdatedAt,
-	FieldToken,
-	FieldUses,
-	FieldExpiresAt,
-}
-
-// ForeignKeys holds the SQL foreign-keys that are owned by the "document_tokens"
-// table and are not defined as standalone fields in the schema.
-var ForeignKeys = []string{
-	"document_document_tokens",
-}
-
-// ValidColumn reports if the column name is valid (part of the table columns).
-func ValidColumn(column string) bool {
-	for i := range Columns {
-		if column == Columns[i] {
-			return true
-		}
-	}
-	for i := range ForeignKeys {
-		if column == ForeignKeys[i] {
-			return true
-		}
-	}
-	return false
-}
-
-var (
-	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
-	DefaultCreatedAt func() time.Time
-	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
-	DefaultUpdatedAt func() time.Time
-	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
-	UpdateDefaultUpdatedAt func() time.Time
-	// TokenValidator is a validator for the "token" field. It is called by the builders before save.
-	TokenValidator func([]byte) error
-	// DefaultUses holds the default value on creation for the "uses" field.
-	DefaultUses int
-	// DefaultExpiresAt holds the default value on creation for the "expires_at" field.
-	DefaultExpiresAt func() time.Time
-	// DefaultID holds the default value on creation for the "id" field.
-	DefaultID func() uuid.UUID
-)
diff --git a/backend/internal/data/ent/documenttoken/where.go b/backend/internal/data/ent/documenttoken/where.go
deleted file mode 100644
index 32dbb39..0000000
--- a/backend/internal/data/ent/documenttoken/where.go
+++ /dev/null
@@ -1,498 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package documenttoken
-
-import (
-	"time"
-
-	"entgo.io/ent/dialect/sql"
-	"entgo.io/ent/dialect/sql/sqlgraph"
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
-)
-
-// ID filters vertices based on their ID field.
-func ID(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldID), id))
-	})
-}
-
-// IDEQ applies the EQ predicate on the ID field.
-func IDEQ(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldID), id))
-	})
-}
-
-// IDNEQ applies the NEQ predicate on the ID field.
-func IDNEQ(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NEQ(s.C(FieldID), id))
-	})
-}
-
-// IDIn applies the In predicate on the ID field.
-func IDIn(ids ...uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		v := make([]any, len(ids))
-		for i := range v {
-			v[i] = ids[i]
-		}
-		s.Where(sql.In(s.C(FieldID), v...))
-	})
-}
-
-// IDNotIn applies the NotIn predicate on the ID field.
-func IDNotIn(ids ...uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		v := make([]any, len(ids))
-		for i := range v {
-			v[i] = ids[i]
-		}
-		s.Where(sql.NotIn(s.C(FieldID), v...))
-	})
-}
-
-// IDGT applies the GT predicate on the ID field.
-func IDGT(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GT(s.C(FieldID), id))
-	})
-}
-
-// IDGTE applies the GTE predicate on the ID field.
-func IDGTE(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GTE(s.C(FieldID), id))
-	})
-}
-
-// IDLT applies the LT predicate on the ID field.
-func IDLT(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LT(s.C(FieldID), id))
-	})
-}
-
-// IDLTE applies the LTE predicate on the ID field.
-func IDLTE(id uuid.UUID) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LTE(s.C(FieldID), id))
-	})
-}
-
-// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
-func CreatedAt(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
-	})
-}
-
-// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
-func UpdatedAt(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// Token applies equality check predicate on the "token" field. It's identical to TokenEQ.
-func Token(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldToken), v))
-	})
-}
-
-// Uses applies equality check predicate on the "uses" field. It's identical to UsesEQ.
-func Uses(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldUses), v))
-	})
-}
-
-// ExpiresAt applies equality check predicate on the "expires_at" field. It's identical to ExpiresAtEQ.
-func ExpiresAt(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldExpiresAt), v))
-	})
-}
-
-// CreatedAtEQ applies the EQ predicate on the "created_at" field.
-func CreatedAtEQ(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
-	})
-}
-
-// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
-func CreatedAtNEQ(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
-	})
-}
-
-// CreatedAtIn applies the In predicate on the "created_at" field.
-func CreatedAtIn(vs ...time.Time) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.In(s.C(FieldCreatedAt), v...))
-	})
-}
-
-// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
-func CreatedAtNotIn(vs ...time.Time) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
-	})
-}
-
-// CreatedAtGT applies the GT predicate on the "created_at" field.
-func CreatedAtGT(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GT(s.C(FieldCreatedAt), v))
-	})
-}
-
-// CreatedAtGTE applies the GTE predicate on the "created_at" field.
-func CreatedAtGTE(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GTE(s.C(FieldCreatedAt), v))
-	})
-}
-
-// CreatedAtLT applies the LT predicate on the "created_at" field.
-func CreatedAtLT(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LT(s.C(FieldCreatedAt), v))
-	})
-}
-
-// CreatedAtLTE applies the LTE predicate on the "created_at" field.
-func CreatedAtLTE(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LTE(s.C(FieldCreatedAt), v))
-	})
-}
-
-// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
-func UpdatedAtEQ(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
-func UpdatedAtNEQ(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// UpdatedAtIn applies the In predicate on the "updated_at" field.
-func UpdatedAtIn(vs ...time.Time) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.In(s.C(FieldUpdatedAt), v...))
-	})
-}
-
-// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
-func UpdatedAtNotIn(vs ...time.Time) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
-	})
-}
-
-// UpdatedAtGT applies the GT predicate on the "updated_at" field.
-func UpdatedAtGT(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GT(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
-func UpdatedAtGTE(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// UpdatedAtLT applies the LT predicate on the "updated_at" field.
-func UpdatedAtLT(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LT(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
-func UpdatedAtLTE(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
-	})
-}
-
-// TokenEQ applies the EQ predicate on the "token" field.
-func TokenEQ(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldToken), v))
-	})
-}
-
-// TokenNEQ applies the NEQ predicate on the "token" field.
-func TokenNEQ(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NEQ(s.C(FieldToken), v))
-	})
-}
-
-// TokenIn applies the In predicate on the "token" field.
-func TokenIn(vs ...[]byte) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.In(s.C(FieldToken), v...))
-	})
-}
-
-// TokenNotIn applies the NotIn predicate on the "token" field.
-func TokenNotIn(vs ...[]byte) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NotIn(s.C(FieldToken), v...))
-	})
-}
-
-// TokenGT applies the GT predicate on the "token" field.
-func TokenGT(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GT(s.C(FieldToken), v))
-	})
-}
-
-// TokenGTE applies the GTE predicate on the "token" field.
-func TokenGTE(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GTE(s.C(FieldToken), v))
-	})
-}
-
-// TokenLT applies the LT predicate on the "token" field.
-func TokenLT(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LT(s.C(FieldToken), v))
-	})
-}
-
-// TokenLTE applies the LTE predicate on the "token" field.
-func TokenLTE(v []byte) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LTE(s.C(FieldToken), v))
-	})
-}
-
-// UsesEQ applies the EQ predicate on the "uses" field.
-func UsesEQ(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldUses), v))
-	})
-}
-
-// UsesNEQ applies the NEQ predicate on the "uses" field.
-func UsesNEQ(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NEQ(s.C(FieldUses), v))
-	})
-}
-
-// UsesIn applies the In predicate on the "uses" field.
-func UsesIn(vs ...int) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.In(s.C(FieldUses), v...))
-	})
-}
-
-// UsesNotIn applies the NotIn predicate on the "uses" field.
-func UsesNotIn(vs ...int) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NotIn(s.C(FieldUses), v...))
-	})
-}
-
-// UsesGT applies the GT predicate on the "uses" field.
-func UsesGT(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GT(s.C(FieldUses), v))
-	})
-}
-
-// UsesGTE applies the GTE predicate on the "uses" field.
-func UsesGTE(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GTE(s.C(FieldUses), v))
-	})
-}
-
-// UsesLT applies the LT predicate on the "uses" field.
-func UsesLT(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LT(s.C(FieldUses), v))
-	})
-}
-
-// UsesLTE applies the LTE predicate on the "uses" field.
-func UsesLTE(v int) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LTE(s.C(FieldUses), v))
-	})
-}
-
-// ExpiresAtEQ applies the EQ predicate on the "expires_at" field.
-func ExpiresAtEQ(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.EQ(s.C(FieldExpiresAt), v))
-	})
-}
-
-// ExpiresAtNEQ applies the NEQ predicate on the "expires_at" field.
-func ExpiresAtNEQ(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NEQ(s.C(FieldExpiresAt), v))
-	})
-}
-
-// ExpiresAtIn applies the In predicate on the "expires_at" field.
-func ExpiresAtIn(vs ...time.Time) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.In(s.C(FieldExpiresAt), v...))
-	})
-}
-
-// ExpiresAtNotIn applies the NotIn predicate on the "expires_at" field.
-func ExpiresAtNotIn(vs ...time.Time) predicate.DocumentToken {
-	v := make([]any, len(vs))
-	for i := range v {
-		v[i] = vs[i]
-	}
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.NotIn(s.C(FieldExpiresAt), v...))
-	})
-}
-
-// ExpiresAtGT applies the GT predicate on the "expires_at" field.
-func ExpiresAtGT(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GT(s.C(FieldExpiresAt), v))
-	})
-}
-
-// ExpiresAtGTE applies the GTE predicate on the "expires_at" field.
-func ExpiresAtGTE(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.GTE(s.C(FieldExpiresAt), v))
-	})
-}
-
-// ExpiresAtLT applies the LT predicate on the "expires_at" field.
-func ExpiresAtLT(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LT(s.C(FieldExpiresAt), v))
-	})
-}
-
-// ExpiresAtLTE applies the LTE predicate on the "expires_at" field.
-func ExpiresAtLTE(v time.Time) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s.Where(sql.LTE(s.C(FieldExpiresAt), v))
-	})
-}
-
-// HasDocument applies the HasEdge predicate on the "document" edge.
-func HasDocument() predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		step := sqlgraph.NewStep(
-			sqlgraph.From(Table, FieldID),
-			sqlgraph.To(DocumentTable, FieldID),
-			sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
-		)
-		sqlgraph.HasNeighbors(s, step)
-	})
-}
-
-// HasDocumentWith applies the HasEdge predicate on the "document" edge with a given conditions (other predicates).
-func HasDocumentWith(preds ...predicate.Document) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		step := sqlgraph.NewStep(
-			sqlgraph.From(Table, FieldID),
-			sqlgraph.To(DocumentInverseTable, FieldID),
-			sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
-		)
-		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
-			for _, p := range preds {
-				p(s)
-			}
-		})
-	})
-}
-
-// And groups predicates with the AND operator between them.
-func And(predicates ...predicate.DocumentToken) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s1 := s.Clone().SetP(nil)
-		for _, p := range predicates {
-			p(s1)
-		}
-		s.Where(s1.P())
-	})
-}
-
-// Or groups predicates with the OR operator between them.
-func Or(predicates ...predicate.DocumentToken) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		s1 := s.Clone().SetP(nil)
-		for i, p := range predicates {
-			if i > 0 {
-				s1.Or()
-			}
-			p(s1)
-		}
-		s.Where(s1.P())
-	})
-}
-
-// Not applies the not operator on the given predicate.
-func Not(p predicate.DocumentToken) predicate.DocumentToken {
-	return predicate.DocumentToken(func(s *sql.Selector) {
-		p(s.Not())
-	})
-}
diff --git a/backend/internal/data/ent/documenttoken_create.go b/backend/internal/data/ent/documenttoken_create.go
deleted file mode 100644
index c937279..0000000
--- a/backend/internal/data/ent/documenttoken_create.go
+++ /dev/null
@@ -1,398 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package ent
-
-import (
-	"context"
-	"errors"
-	"fmt"
-	"time"
-
-	"entgo.io/ent/dialect/sql/sqlgraph"
-	"entgo.io/ent/schema/field"
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-)
-
-// DocumentTokenCreate is the builder for creating a DocumentToken entity.
-type DocumentTokenCreate struct {
-	config
-	mutation *DocumentTokenMutation
-	hooks    []Hook
-}
-
-// SetCreatedAt sets the "created_at" field.
-func (dtc *DocumentTokenCreate) SetCreatedAt(t time.Time) *DocumentTokenCreate {
-	dtc.mutation.SetCreatedAt(t)
-	return dtc
-}
-
-// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
-func (dtc *DocumentTokenCreate) SetNillableCreatedAt(t *time.Time) *DocumentTokenCreate {
-	if t != nil {
-		dtc.SetCreatedAt(*t)
-	}
-	return dtc
-}
-
-// SetUpdatedAt sets the "updated_at" field.
-func (dtc *DocumentTokenCreate) SetUpdatedAt(t time.Time) *DocumentTokenCreate {
-	dtc.mutation.SetUpdatedAt(t)
-	return dtc
-}
-
-// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
-func (dtc *DocumentTokenCreate) SetNillableUpdatedAt(t *time.Time) *DocumentTokenCreate {
-	if t != nil {
-		dtc.SetUpdatedAt(*t)
-	}
-	return dtc
-}
-
-// SetToken sets the "token" field.
-func (dtc *DocumentTokenCreate) SetToken(b []byte) *DocumentTokenCreate {
-	dtc.mutation.SetToken(b)
-	return dtc
-}
-
-// SetUses sets the "uses" field.
-func (dtc *DocumentTokenCreate) SetUses(i int) *DocumentTokenCreate {
-	dtc.mutation.SetUses(i)
-	return dtc
-}
-
-// SetNillableUses sets the "uses" field if the given value is not nil.
-func (dtc *DocumentTokenCreate) SetNillableUses(i *int) *DocumentTokenCreate {
-	if i != nil {
-		dtc.SetUses(*i)
-	}
-	return dtc
-}
-
-// SetExpiresAt sets the "expires_at" field.
-func (dtc *DocumentTokenCreate) SetExpiresAt(t time.Time) *DocumentTokenCreate {
-	dtc.mutation.SetExpiresAt(t)
-	return dtc
-}
-
-// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil.
-func (dtc *DocumentTokenCreate) SetNillableExpiresAt(t *time.Time) *DocumentTokenCreate {
-	if t != nil {
-		dtc.SetExpiresAt(*t)
-	}
-	return dtc
-}
-
-// SetID sets the "id" field.
-func (dtc *DocumentTokenCreate) SetID(u uuid.UUID) *DocumentTokenCreate {
-	dtc.mutation.SetID(u)
-	return dtc
-}
-
-// SetNillableID sets the "id" field if the given value is not nil.
-func (dtc *DocumentTokenCreate) SetNillableID(u *uuid.UUID) *DocumentTokenCreate {
-	if u != nil {
-		dtc.SetID(*u)
-	}
-	return dtc
-}
-
-// SetDocumentID sets the "document" edge to the Document entity by ID.
-func (dtc *DocumentTokenCreate) SetDocumentID(id uuid.UUID) *DocumentTokenCreate {
-	dtc.mutation.SetDocumentID(id)
-	return dtc
-}
-
-// SetNillableDocumentID sets the "document" edge to the Document entity by ID if the given value is not nil.
-func (dtc *DocumentTokenCreate) SetNillableDocumentID(id *uuid.UUID) *DocumentTokenCreate {
-	if id != nil {
-		dtc = dtc.SetDocumentID(*id)
-	}
-	return dtc
-}
-
-// SetDocument sets the "document" edge to the Document entity.
-func (dtc *DocumentTokenCreate) SetDocument(d *Document) *DocumentTokenCreate {
-	return dtc.SetDocumentID(d.ID)
-}
-
-// Mutation returns the DocumentTokenMutation object of the builder.
-func (dtc *DocumentTokenCreate) Mutation() *DocumentTokenMutation {
-	return dtc.mutation
-}
-
-// Save creates the DocumentToken in the database.
-func (dtc *DocumentTokenCreate) Save(ctx context.Context) (*DocumentToken, error) {
-	var (
-		err  error
-		node *DocumentToken
-	)
-	dtc.defaults()
-	if len(dtc.hooks) == 0 {
-		if err = dtc.check(); err != nil {
-			return nil, err
-		}
-		node, err = dtc.sqlSave(ctx)
-	} else {
-		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
-			mutation, ok := m.(*DocumentTokenMutation)
-			if !ok {
-				return nil, fmt.Errorf("unexpected mutation type %T", m)
-			}
-			if err = dtc.check(); err != nil {
-				return nil, err
-			}
-			dtc.mutation = mutation
-			if node, err = dtc.sqlSave(ctx); err != nil {
-				return nil, err
-			}
-			mutation.id = &node.ID
-			mutation.done = true
-			return node, err
-		})
-		for i := len(dtc.hooks) - 1; i >= 0; i-- {
-			if dtc.hooks[i] == nil {
-				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
-			}
-			mut = dtc.hooks[i](mut)
-		}
-		v, err := mut.Mutate(ctx, dtc.mutation)
-		if err != nil {
-			return nil, err
-		}
-		nv, ok := v.(*DocumentToken)
-		if !ok {
-			return nil, fmt.Errorf("unexpected node type %T returned from DocumentTokenMutation", v)
-		}
-		node = nv
-	}
-	return node, err
-}
-
-// SaveX calls Save and panics if Save returns an error.
-func (dtc *DocumentTokenCreate) SaveX(ctx context.Context) *DocumentToken {
-	v, err := dtc.Save(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return v
-}
-
-// Exec executes the query.
-func (dtc *DocumentTokenCreate) Exec(ctx context.Context) error {
-	_, err := dtc.Save(ctx)
-	return err
-}
-
-// ExecX is like Exec, but panics if an error occurs.
-func (dtc *DocumentTokenCreate) ExecX(ctx context.Context) {
-	if err := dtc.Exec(ctx); err != nil {
-		panic(err)
-	}
-}
-
-// defaults sets the default values of the builder before save.
-func (dtc *DocumentTokenCreate) defaults() {
-	if _, ok := dtc.mutation.CreatedAt(); !ok {
-		v := documenttoken.DefaultCreatedAt()
-		dtc.mutation.SetCreatedAt(v)
-	}
-	if _, ok := dtc.mutation.UpdatedAt(); !ok {
-		v := documenttoken.DefaultUpdatedAt()
-		dtc.mutation.SetUpdatedAt(v)
-	}
-	if _, ok := dtc.mutation.Uses(); !ok {
-		v := documenttoken.DefaultUses
-		dtc.mutation.SetUses(v)
-	}
-	if _, ok := dtc.mutation.ExpiresAt(); !ok {
-		v := documenttoken.DefaultExpiresAt()
-		dtc.mutation.SetExpiresAt(v)
-	}
-	if _, ok := dtc.mutation.ID(); !ok {
-		v := documenttoken.DefaultID()
-		dtc.mutation.SetID(v)
-	}
-}
-
-// check runs all checks and user-defined validators on the builder.
-func (dtc *DocumentTokenCreate) check() error {
-	if _, ok := dtc.mutation.CreatedAt(); !ok {
-		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "DocumentToken.created_at"`)}
-	}
-	if _, ok := dtc.mutation.UpdatedAt(); !ok {
-		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "DocumentToken.updated_at"`)}
-	}
-	if _, ok := dtc.mutation.Token(); !ok {
-		return &ValidationError{Name: "token", err: errors.New(`ent: missing required field "DocumentToken.token"`)}
-	}
-	if v, ok := dtc.mutation.Token(); ok {
-		if err := documenttoken.TokenValidator(v); err != nil {
-			return &ValidationError{Name: "token", err: fmt.Errorf(`ent: validator failed for field "DocumentToken.token": %w`, err)}
-		}
-	}
-	if _, ok := dtc.mutation.Uses(); !ok {
-		return &ValidationError{Name: "uses", err: errors.New(`ent: missing required field "DocumentToken.uses"`)}
-	}
-	if _, ok := dtc.mutation.ExpiresAt(); !ok {
-		return &ValidationError{Name: "expires_at", err: errors.New(`ent: missing required field "DocumentToken.expires_at"`)}
-	}
-	return nil
-}
-
-func (dtc *DocumentTokenCreate) sqlSave(ctx context.Context) (*DocumentToken, error) {
-	_node, _spec := dtc.createSpec()
-	if err := sqlgraph.CreateNode(ctx, dtc.driver, _spec); err != nil {
-		if sqlgraph.IsConstraintError(err) {
-			err = &ConstraintError{msg: err.Error(), wrap: err}
-		}
-		return nil, err
-	}
-	if _spec.ID.Value != nil {
-		if id, ok := _spec.ID.Value.(*uuid.UUID); ok {
-			_node.ID = *id
-		} else if err := _node.ID.Scan(_spec.ID.Value); err != nil {
-			return nil, err
-		}
-	}
-	return _node, nil
-}
-
-func (dtc *DocumentTokenCreate) createSpec() (*DocumentToken, *sqlgraph.CreateSpec) {
-	var (
-		_node = &DocumentToken{config: dtc.config}
-		_spec = &sqlgraph.CreateSpec{
-			Table: documenttoken.Table,
-			ID: &sqlgraph.FieldSpec{
-				Type:   field.TypeUUID,
-				Column: documenttoken.FieldID,
-			},
-		}
-	)
-	if id, ok := dtc.mutation.ID(); ok {
-		_node.ID = id
-		_spec.ID.Value = &id
-	}
-	if value, ok := dtc.mutation.CreatedAt(); ok {
-		_spec.SetField(documenttoken.FieldCreatedAt, field.TypeTime, value)
-		_node.CreatedAt = value
-	}
-	if value, ok := dtc.mutation.UpdatedAt(); ok {
-		_spec.SetField(documenttoken.FieldUpdatedAt, field.TypeTime, value)
-		_node.UpdatedAt = value
-	}
-	if value, ok := dtc.mutation.Token(); ok {
-		_spec.SetField(documenttoken.FieldToken, field.TypeBytes, value)
-		_node.Token = value
-	}
-	if value, ok := dtc.mutation.Uses(); ok {
-		_spec.SetField(documenttoken.FieldUses, field.TypeInt, value)
-		_node.Uses = value
-	}
-	if value, ok := dtc.mutation.ExpiresAt(); ok {
-		_spec.SetField(documenttoken.FieldExpiresAt, field.TypeTime, value)
-		_node.ExpiresAt = value
-	}
-	if nodes := dtc.mutation.DocumentIDs(); len(nodes) > 0 {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.M2O,
-			Inverse: true,
-			Table:   documenttoken.DocumentTable,
-			Columns: []string{documenttoken.DocumentColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: document.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_node.document_document_tokens = &nodes[0]
-		_spec.Edges = append(_spec.Edges, edge)
-	}
-	return _node, _spec
-}
-
-// DocumentTokenCreateBulk is the builder for creating many DocumentToken entities in bulk.
-type DocumentTokenCreateBulk struct {
-	config
-	builders []*DocumentTokenCreate
-}
-
-// Save creates the DocumentToken entities in the database.
-func (dtcb *DocumentTokenCreateBulk) Save(ctx context.Context) ([]*DocumentToken, error) {
-	specs := make([]*sqlgraph.CreateSpec, len(dtcb.builders))
-	nodes := make([]*DocumentToken, len(dtcb.builders))
-	mutators := make([]Mutator, len(dtcb.builders))
-	for i := range dtcb.builders {
-		func(i int, root context.Context) {
-			builder := dtcb.builders[i]
-			builder.defaults()
-			var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
-				mutation, ok := m.(*DocumentTokenMutation)
-				if !ok {
-					return nil, fmt.Errorf("unexpected mutation type %T", m)
-				}
-				if err := builder.check(); err != nil {
-					return nil, err
-				}
-				builder.mutation = mutation
-				nodes[i], specs[i] = builder.createSpec()
-				var err error
-				if i < len(mutators)-1 {
-					_, err = mutators[i+1].Mutate(root, dtcb.builders[i+1].mutation)
-				} else {
-					spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
-					// Invoke the actual operation on the latest mutation in the chain.
-					if err = sqlgraph.BatchCreate(ctx, dtcb.driver, spec); err != nil {
-						if sqlgraph.IsConstraintError(err) {
-							err = &ConstraintError{msg: err.Error(), wrap: err}
-						}
-					}
-				}
-				if err != nil {
-					return nil, err
-				}
-				mutation.id = &nodes[i].ID
-				mutation.done = true
-				return nodes[i], nil
-			})
-			for i := len(builder.hooks) - 1; i >= 0; i-- {
-				mut = builder.hooks[i](mut)
-			}
-			mutators[i] = mut
-		}(i, ctx)
-	}
-	if len(mutators) > 0 {
-		if _, err := mutators[0].Mutate(ctx, dtcb.builders[0].mutation); err != nil {
-			return nil, err
-		}
-	}
-	return nodes, nil
-}
-
-// SaveX is like Save, but panics if an error occurs.
-func (dtcb *DocumentTokenCreateBulk) SaveX(ctx context.Context) []*DocumentToken {
-	v, err := dtcb.Save(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return v
-}
-
-// Exec executes the query.
-func (dtcb *DocumentTokenCreateBulk) Exec(ctx context.Context) error {
-	_, err := dtcb.Save(ctx)
-	return err
-}
-
-// ExecX is like Exec, but panics if an error occurs.
-func (dtcb *DocumentTokenCreateBulk) ExecX(ctx context.Context) {
-	if err := dtcb.Exec(ctx); err != nil {
-		panic(err)
-	}
-}
diff --git a/backend/internal/data/ent/documenttoken_delete.go b/backend/internal/data/ent/documenttoken_delete.go
deleted file mode 100644
index 722ec1b..0000000
--- a/backend/internal/data/ent/documenttoken_delete.go
+++ /dev/null
@@ -1,115 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package ent
-
-import (
-	"context"
-	"fmt"
-
-	"entgo.io/ent/dialect/sql"
-	"entgo.io/ent/dialect/sql/sqlgraph"
-	"entgo.io/ent/schema/field"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
-)
-
-// DocumentTokenDelete is the builder for deleting a DocumentToken entity.
-type DocumentTokenDelete struct {
-	config
-	hooks    []Hook
-	mutation *DocumentTokenMutation
-}
-
-// Where appends a list predicates to the DocumentTokenDelete builder.
-func (dtd *DocumentTokenDelete) Where(ps ...predicate.DocumentToken) *DocumentTokenDelete {
-	dtd.mutation.Where(ps...)
-	return dtd
-}
-
-// Exec executes the deletion query and returns how many vertices were deleted.
-func (dtd *DocumentTokenDelete) Exec(ctx context.Context) (int, error) {
-	var (
-		err      error
-		affected int
-	)
-	if len(dtd.hooks) == 0 {
-		affected, err = dtd.sqlExec(ctx)
-	} else {
-		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
-			mutation, ok := m.(*DocumentTokenMutation)
-			if !ok {
-				return nil, fmt.Errorf("unexpected mutation type %T", m)
-			}
-			dtd.mutation = mutation
-			affected, err = dtd.sqlExec(ctx)
-			mutation.done = true
-			return affected, err
-		})
-		for i := len(dtd.hooks) - 1; i >= 0; i-- {
-			if dtd.hooks[i] == nil {
-				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
-			}
-			mut = dtd.hooks[i](mut)
-		}
-		if _, err := mut.Mutate(ctx, dtd.mutation); err != nil {
-			return 0, err
-		}
-	}
-	return affected, err
-}
-
-// ExecX is like Exec, but panics if an error occurs.
-func (dtd *DocumentTokenDelete) ExecX(ctx context.Context) int {
-	n, err := dtd.Exec(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return n
-}
-
-func (dtd *DocumentTokenDelete) sqlExec(ctx context.Context) (int, error) {
-	_spec := &sqlgraph.DeleteSpec{
-		Node: &sqlgraph.NodeSpec{
-			Table: documenttoken.Table,
-			ID: &sqlgraph.FieldSpec{
-				Type:   field.TypeUUID,
-				Column: documenttoken.FieldID,
-			},
-		},
-	}
-	if ps := dtd.mutation.predicates; len(ps) > 0 {
-		_spec.Predicate = func(selector *sql.Selector) {
-			for i := range ps {
-				ps[i](selector)
-			}
-		}
-	}
-	affected, err := sqlgraph.DeleteNodes(ctx, dtd.driver, _spec)
-	if err != nil && sqlgraph.IsConstraintError(err) {
-		err = &ConstraintError{msg: err.Error(), wrap: err}
-	}
-	return affected, err
-}
-
-// DocumentTokenDeleteOne is the builder for deleting a single DocumentToken entity.
-type DocumentTokenDeleteOne struct {
-	dtd *DocumentTokenDelete
-}
-
-// Exec executes the deletion query.
-func (dtdo *DocumentTokenDeleteOne) Exec(ctx context.Context) error {
-	n, err := dtdo.dtd.Exec(ctx)
-	switch {
-	case err != nil:
-		return err
-	case n == 0:
-		return &NotFoundError{documenttoken.Label}
-	default:
-		return nil
-	}
-}
-
-// ExecX is like Exec, but panics if an error occurs.
-func (dtdo *DocumentTokenDeleteOne) ExecX(ctx context.Context) {
-	dtdo.dtd.ExecX(ctx)
-}
diff --git a/backend/internal/data/ent/documenttoken_query.go b/backend/internal/data/ent/documenttoken_query.go
deleted file mode 100644
index 58cb61b..0000000
--- a/backend/internal/data/ent/documenttoken_query.go
+++ /dev/null
@@ -1,633 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package ent
-
-import (
-	"context"
-	"fmt"
-	"math"
-
-	"entgo.io/ent/dialect/sql"
-	"entgo.io/ent/dialect/sql/sqlgraph"
-	"entgo.io/ent/schema/field"
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
-)
-
-// DocumentTokenQuery is the builder for querying DocumentToken entities.
-type DocumentTokenQuery struct {
-	config
-	limit        *int
-	offset       *int
-	unique       *bool
-	order        []OrderFunc
-	fields       []string
-	predicates   []predicate.DocumentToken
-	withDocument *DocumentQuery
-	withFKs      bool
-	// intermediate query (i.e. traversal path).
-	sql  *sql.Selector
-	path func(context.Context) (*sql.Selector, error)
-}
-
-// Where adds a new predicate for the DocumentTokenQuery builder.
-func (dtq *DocumentTokenQuery) Where(ps ...predicate.DocumentToken) *DocumentTokenQuery {
-	dtq.predicates = append(dtq.predicates, ps...)
-	return dtq
-}
-
-// Limit adds a limit step to the query.
-func (dtq *DocumentTokenQuery) Limit(limit int) *DocumentTokenQuery {
-	dtq.limit = &limit
-	return dtq
-}
-
-// Offset adds an offset step to the query.
-func (dtq *DocumentTokenQuery) Offset(offset int) *DocumentTokenQuery {
-	dtq.offset = &offset
-	return dtq
-}
-
-// Unique configures the query builder to filter duplicate records on query.
-// By default, unique is set to true, and can be disabled using this method.
-func (dtq *DocumentTokenQuery) Unique(unique bool) *DocumentTokenQuery {
-	dtq.unique = &unique
-	return dtq
-}
-
-// Order adds an order step to the query.
-func (dtq *DocumentTokenQuery) Order(o ...OrderFunc) *DocumentTokenQuery {
-	dtq.order = append(dtq.order, o...)
-	return dtq
-}
-
-// QueryDocument chains the current query on the "document" edge.
-func (dtq *DocumentTokenQuery) QueryDocument() *DocumentQuery {
-	query := &DocumentQuery{config: dtq.config}
-	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
-		if err := dtq.prepareQuery(ctx); err != nil {
-			return nil, err
-		}
-		selector := dtq.sqlQuery(ctx)
-		if err := selector.Err(); err != nil {
-			return nil, err
-		}
-		step := sqlgraph.NewStep(
-			sqlgraph.From(documenttoken.Table, documenttoken.FieldID, selector),
-			sqlgraph.To(document.Table, document.FieldID),
-			sqlgraph.Edge(sqlgraph.M2O, true, documenttoken.DocumentTable, documenttoken.DocumentColumn),
-		)
-		fromU = sqlgraph.SetNeighbors(dtq.driver.Dialect(), step)
-		return fromU, nil
-	}
-	return query
-}
-
-// First returns the first DocumentToken entity from the query.
-// Returns a *NotFoundError when no DocumentToken was found.
-func (dtq *DocumentTokenQuery) First(ctx context.Context) (*DocumentToken, error) {
-	nodes, err := dtq.Limit(1).All(ctx)
-	if err != nil {
-		return nil, err
-	}
-	if len(nodes) == 0 {
-		return nil, &NotFoundError{documenttoken.Label}
-	}
-	return nodes[0], nil
-}
-
-// FirstX is like First, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) FirstX(ctx context.Context) *DocumentToken {
-	node, err := dtq.First(ctx)
-	if err != nil && !IsNotFound(err) {
-		panic(err)
-	}
-	return node
-}
-
-// FirstID returns the first DocumentToken ID from the query.
-// Returns a *NotFoundError when no DocumentToken ID was found.
-func (dtq *DocumentTokenQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
-	var ids []uuid.UUID
-	if ids, err = dtq.Limit(1).IDs(ctx); err != nil {
-		return
-	}
-	if len(ids) == 0 {
-		err = &NotFoundError{documenttoken.Label}
-		return
-	}
-	return ids[0], nil
-}
-
-// FirstIDX is like FirstID, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) FirstIDX(ctx context.Context) uuid.UUID {
-	id, err := dtq.FirstID(ctx)
-	if err != nil && !IsNotFound(err) {
-		panic(err)
-	}
-	return id
-}
-
-// Only returns a single DocumentToken entity found by the query, ensuring it only returns one.
-// Returns a *NotSingularError when more than one DocumentToken entity is found.
-// Returns a *NotFoundError when no DocumentToken entities are found.
-func (dtq *DocumentTokenQuery) Only(ctx context.Context) (*DocumentToken, error) {
-	nodes, err := dtq.Limit(2).All(ctx)
-	if err != nil {
-		return nil, err
-	}
-	switch len(nodes) {
-	case 1:
-		return nodes[0], nil
-	case 0:
-		return nil, &NotFoundError{documenttoken.Label}
-	default:
-		return nil, &NotSingularError{documenttoken.Label}
-	}
-}
-
-// OnlyX is like Only, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) OnlyX(ctx context.Context) *DocumentToken {
-	node, err := dtq.Only(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return node
-}
-
-// OnlyID is like Only, but returns the only DocumentToken ID in the query.
-// Returns a *NotSingularError when more than one DocumentToken ID is found.
-// Returns a *NotFoundError when no entities are found.
-func (dtq *DocumentTokenQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
-	var ids []uuid.UUID
-	if ids, err = dtq.Limit(2).IDs(ctx); err != nil {
-		return
-	}
-	switch len(ids) {
-	case 1:
-		id = ids[0]
-	case 0:
-		err = &NotFoundError{documenttoken.Label}
-	default:
-		err = &NotSingularError{documenttoken.Label}
-	}
-	return
-}
-
-// OnlyIDX is like OnlyID, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) OnlyIDX(ctx context.Context) uuid.UUID {
-	id, err := dtq.OnlyID(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return id
-}
-
-// All executes the query and returns a list of DocumentTokens.
-func (dtq *DocumentTokenQuery) All(ctx context.Context) ([]*DocumentToken, error) {
-	if err := dtq.prepareQuery(ctx); err != nil {
-		return nil, err
-	}
-	return dtq.sqlAll(ctx)
-}
-
-// AllX is like All, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) AllX(ctx context.Context) []*DocumentToken {
-	nodes, err := dtq.All(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return nodes
-}
-
-// IDs executes the query and returns a list of DocumentToken IDs.
-func (dtq *DocumentTokenQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
-	var ids []uuid.UUID
-	if err := dtq.Select(documenttoken.FieldID).Scan(ctx, &ids); err != nil {
-		return nil, err
-	}
-	return ids, nil
-}
-
-// IDsX is like IDs, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) IDsX(ctx context.Context) []uuid.UUID {
-	ids, err := dtq.IDs(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return ids
-}
-
-// Count returns the count of the given query.
-func (dtq *DocumentTokenQuery) Count(ctx context.Context) (int, error) {
-	if err := dtq.prepareQuery(ctx); err != nil {
-		return 0, err
-	}
-	return dtq.sqlCount(ctx)
-}
-
-// CountX is like Count, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) CountX(ctx context.Context) int {
-	count, err := dtq.Count(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return count
-}
-
-// Exist returns true if the query has elements in the graph.
-func (dtq *DocumentTokenQuery) Exist(ctx context.Context) (bool, error) {
-	if err := dtq.prepareQuery(ctx); err != nil {
-		return false, err
-	}
-	return dtq.sqlExist(ctx)
-}
-
-// ExistX is like Exist, but panics if an error occurs.
-func (dtq *DocumentTokenQuery) ExistX(ctx context.Context) bool {
-	exist, err := dtq.Exist(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return exist
-}
-
-// Clone returns a duplicate of the DocumentTokenQuery builder, including all associated steps. It can be
-// used to prepare common query builders and use them differently after the clone is made.
-func (dtq *DocumentTokenQuery) Clone() *DocumentTokenQuery {
-	if dtq == nil {
-		return nil
-	}
-	return &DocumentTokenQuery{
-		config:       dtq.config,
-		limit:        dtq.limit,
-		offset:       dtq.offset,
-		order:        append([]OrderFunc{}, dtq.order...),
-		predicates:   append([]predicate.DocumentToken{}, dtq.predicates...),
-		withDocument: dtq.withDocument.Clone(),
-		// clone intermediate query.
-		sql:    dtq.sql.Clone(),
-		path:   dtq.path,
-		unique: dtq.unique,
-	}
-}
-
-// WithDocument tells the query-builder to eager-load the nodes that are connected to
-// the "document" edge. The optional arguments are used to configure the query builder of the edge.
-func (dtq *DocumentTokenQuery) WithDocument(opts ...func(*DocumentQuery)) *DocumentTokenQuery {
-	query := &DocumentQuery{config: dtq.config}
-	for _, opt := range opts {
-		opt(query)
-	}
-	dtq.withDocument = query
-	return dtq
-}
-
-// GroupBy is used to group vertices by one or more fields/columns.
-// It is often used with aggregate functions, like: count, max, mean, min, sum.
-//
-// Example:
-//
-//	var v []struct {
-//		CreatedAt time.Time `json:"created_at,omitempty"`
-//		Count int `json:"count,omitempty"`
-//	}
-//
-//	client.DocumentToken.Query().
-//		GroupBy(documenttoken.FieldCreatedAt).
-//		Aggregate(ent.Count()).
-//		Scan(ctx, &v)
-func (dtq *DocumentTokenQuery) GroupBy(field string, fields ...string) *DocumentTokenGroupBy {
-	grbuild := &DocumentTokenGroupBy{config: dtq.config}
-	grbuild.fields = append([]string{field}, fields...)
-	grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) {
-		if err := dtq.prepareQuery(ctx); err != nil {
-			return nil, err
-		}
-		return dtq.sqlQuery(ctx), nil
-	}
-	grbuild.label = documenttoken.Label
-	grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan
-	return grbuild
-}
-
-// Select allows the selection one or more fields/columns for the given query,
-// instead of selecting all fields in the entity.
-//
-// Example:
-//
-//	var v []struct {
-//		CreatedAt time.Time `json:"created_at,omitempty"`
-//	}
-//
-//	client.DocumentToken.Query().
-//		Select(documenttoken.FieldCreatedAt).
-//		Scan(ctx, &v)
-func (dtq *DocumentTokenQuery) Select(fields ...string) *DocumentTokenSelect {
-	dtq.fields = append(dtq.fields, fields...)
-	selbuild := &DocumentTokenSelect{DocumentTokenQuery: dtq}
-	selbuild.label = documenttoken.Label
-	selbuild.flds, selbuild.scan = &dtq.fields, selbuild.Scan
-	return selbuild
-}
-
-// Aggregate returns a DocumentTokenSelect configured with the given aggregations.
-func (dtq *DocumentTokenQuery) Aggregate(fns ...AggregateFunc) *DocumentTokenSelect {
-	return dtq.Select().Aggregate(fns...)
-}
-
-func (dtq *DocumentTokenQuery) prepareQuery(ctx context.Context) error {
-	for _, f := range dtq.fields {
-		if !documenttoken.ValidColumn(f) {
-			return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
-		}
-	}
-	if dtq.path != nil {
-		prev, err := dtq.path(ctx)
-		if err != nil {
-			return err
-		}
-		dtq.sql = prev
-	}
-	return nil
-}
-
-func (dtq *DocumentTokenQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*DocumentToken, error) {
-	var (
-		nodes       = []*DocumentToken{}
-		withFKs     = dtq.withFKs
-		_spec       = dtq.querySpec()
-		loadedTypes = [1]bool{
-			dtq.withDocument != nil,
-		}
-	)
-	if dtq.withDocument != nil {
-		withFKs = true
-	}
-	if withFKs {
-		_spec.Node.Columns = append(_spec.Node.Columns, documenttoken.ForeignKeys...)
-	}
-	_spec.ScanValues = func(columns []string) ([]any, error) {
-		return (*DocumentToken).scanValues(nil, columns)
-	}
-	_spec.Assign = func(columns []string, values []any) error {
-		node := &DocumentToken{config: dtq.config}
-		nodes = append(nodes, node)
-		node.Edges.loadedTypes = loadedTypes
-		return node.assignValues(columns, values)
-	}
-	for i := range hooks {
-		hooks[i](ctx, _spec)
-	}
-	if err := sqlgraph.QueryNodes(ctx, dtq.driver, _spec); err != nil {
-		return nil, err
-	}
-	if len(nodes) == 0 {
-		return nodes, nil
-	}
-	if query := dtq.withDocument; query != nil {
-		if err := dtq.loadDocument(ctx, query, nodes, nil,
-			func(n *DocumentToken, e *Document) { n.Edges.Document = e }); err != nil {
-			return nil, err
-		}
-	}
-	return nodes, nil
-}
-
-func (dtq *DocumentTokenQuery) loadDocument(ctx context.Context, query *DocumentQuery, nodes []*DocumentToken, init func(*DocumentToken), assign func(*DocumentToken, *Document)) error {
-	ids := make([]uuid.UUID, 0, len(nodes))
-	nodeids := make(map[uuid.UUID][]*DocumentToken)
-	for i := range nodes {
-		if nodes[i].document_document_tokens == nil {
-			continue
-		}
-		fk := *nodes[i].document_document_tokens
-		if _, ok := nodeids[fk]; !ok {
-			ids = append(ids, fk)
-		}
-		nodeids[fk] = append(nodeids[fk], nodes[i])
-	}
-	query.Where(document.IDIn(ids...))
-	neighbors, err := query.All(ctx)
-	if err != nil {
-		return err
-	}
-	for _, n := range neighbors {
-		nodes, ok := nodeids[n.ID]
-		if !ok {
-			return fmt.Errorf(`unexpected foreign-key "document_document_tokens" returned %v`, n.ID)
-		}
-		for i := range nodes {
-			assign(nodes[i], n)
-		}
-	}
-	return nil
-}
-
-func (dtq *DocumentTokenQuery) sqlCount(ctx context.Context) (int, error) {
-	_spec := dtq.querySpec()
-	_spec.Node.Columns = dtq.fields
-	if len(dtq.fields) > 0 {
-		_spec.Unique = dtq.unique != nil && *dtq.unique
-	}
-	return sqlgraph.CountNodes(ctx, dtq.driver, _spec)
-}
-
-func (dtq *DocumentTokenQuery) sqlExist(ctx context.Context) (bool, error) {
-	switch _, err := dtq.FirstID(ctx); {
-	case IsNotFound(err):
-		return false, nil
-	case err != nil:
-		return false, fmt.Errorf("ent: check existence: %w", err)
-	default:
-		return true, nil
-	}
-}
-
-func (dtq *DocumentTokenQuery) querySpec() *sqlgraph.QuerySpec {
-	_spec := &sqlgraph.QuerySpec{
-		Node: &sqlgraph.NodeSpec{
-			Table:   documenttoken.Table,
-			Columns: documenttoken.Columns,
-			ID: &sqlgraph.FieldSpec{
-				Type:   field.TypeUUID,
-				Column: documenttoken.FieldID,
-			},
-		},
-		From:   dtq.sql,
-		Unique: true,
-	}
-	if unique := dtq.unique; unique != nil {
-		_spec.Unique = *unique
-	}
-	if fields := dtq.fields; len(fields) > 0 {
-		_spec.Node.Columns = make([]string, 0, len(fields))
-		_spec.Node.Columns = append(_spec.Node.Columns, documenttoken.FieldID)
-		for i := range fields {
-			if fields[i] != documenttoken.FieldID {
-				_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
-			}
-		}
-	}
-	if ps := dtq.predicates; len(ps) > 0 {
-		_spec.Predicate = func(selector *sql.Selector) {
-			for i := range ps {
-				ps[i](selector)
-			}
-		}
-	}
-	if limit := dtq.limit; limit != nil {
-		_spec.Limit = *limit
-	}
-	if offset := dtq.offset; offset != nil {
-		_spec.Offset = *offset
-	}
-	if ps := dtq.order; len(ps) > 0 {
-		_spec.Order = func(selector *sql.Selector) {
-			for i := range ps {
-				ps[i](selector)
-			}
-		}
-	}
-	return _spec
-}
-
-func (dtq *DocumentTokenQuery) sqlQuery(ctx context.Context) *sql.Selector {
-	builder := sql.Dialect(dtq.driver.Dialect())
-	t1 := builder.Table(documenttoken.Table)
-	columns := dtq.fields
-	if len(columns) == 0 {
-		columns = documenttoken.Columns
-	}
-	selector := builder.Select(t1.Columns(columns...)...).From(t1)
-	if dtq.sql != nil {
-		selector = dtq.sql
-		selector.Select(selector.Columns(columns...)...)
-	}
-	if dtq.unique != nil && *dtq.unique {
-		selector.Distinct()
-	}
-	for _, p := range dtq.predicates {
-		p(selector)
-	}
-	for _, p := range dtq.order {
-		p(selector)
-	}
-	if offset := dtq.offset; offset != nil {
-		// limit is mandatory for offset clause. We start
-		// with default value, and override it below if needed.
-		selector.Offset(*offset).Limit(math.MaxInt32)
-	}
-	if limit := dtq.limit; limit != nil {
-		selector.Limit(*limit)
-	}
-	return selector
-}
-
-// DocumentTokenGroupBy is the group-by builder for DocumentToken entities.
-type DocumentTokenGroupBy struct {
-	config
-	selector
-	fields []string
-	fns    []AggregateFunc
-	// intermediate query (i.e. traversal path).
-	sql  *sql.Selector
-	path func(context.Context) (*sql.Selector, error)
-}
-
-// Aggregate adds the given aggregation functions to the group-by query.
-func (dtgb *DocumentTokenGroupBy) Aggregate(fns ...AggregateFunc) *DocumentTokenGroupBy {
-	dtgb.fns = append(dtgb.fns, fns...)
-	return dtgb
-}
-
-// Scan applies the group-by query and scans the result into the given value.
-func (dtgb *DocumentTokenGroupBy) Scan(ctx context.Context, v any) error {
-	query, err := dtgb.path(ctx)
-	if err != nil {
-		return err
-	}
-	dtgb.sql = query
-	return dtgb.sqlScan(ctx, v)
-}
-
-func (dtgb *DocumentTokenGroupBy) sqlScan(ctx context.Context, v any) error {
-	for _, f := range dtgb.fields {
-		if !documenttoken.ValidColumn(f) {
-			return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)}
-		}
-	}
-	selector := dtgb.sqlQuery()
-	if err := selector.Err(); err != nil {
-		return err
-	}
-	rows := &sql.Rows{}
-	query, args := selector.Query()
-	if err := dtgb.driver.Query(ctx, query, args, rows); err != nil {
-		return err
-	}
-	defer rows.Close()
-	return sql.ScanSlice(rows, v)
-}
-
-func (dtgb *DocumentTokenGroupBy) sqlQuery() *sql.Selector {
-	selector := dtgb.sql.Select()
-	aggregation := make([]string, 0, len(dtgb.fns))
-	for _, fn := range dtgb.fns {
-		aggregation = append(aggregation, fn(selector))
-	}
-	if len(selector.SelectedColumns()) == 0 {
-		columns := make([]string, 0, len(dtgb.fields)+len(dtgb.fns))
-		for _, f := range dtgb.fields {
-			columns = append(columns, selector.C(f))
-		}
-		columns = append(columns, aggregation...)
-		selector.Select(columns...)
-	}
-	return selector.GroupBy(selector.Columns(dtgb.fields...)...)
-}
-
-// DocumentTokenSelect is the builder for selecting fields of DocumentToken entities.
-type DocumentTokenSelect struct {
-	*DocumentTokenQuery
-	selector
-	// intermediate query (i.e. traversal path).
-	sql *sql.Selector
-}
-
-// Aggregate adds the given aggregation functions to the selector query.
-func (dts *DocumentTokenSelect) Aggregate(fns ...AggregateFunc) *DocumentTokenSelect {
-	dts.fns = append(dts.fns, fns...)
-	return dts
-}
-
-// Scan applies the selector query and scans the result into the given value.
-func (dts *DocumentTokenSelect) Scan(ctx context.Context, v any) error {
-	if err := dts.prepareQuery(ctx); err != nil {
-		return err
-	}
-	dts.sql = dts.DocumentTokenQuery.sqlQuery(ctx)
-	return dts.sqlScan(ctx, v)
-}
-
-func (dts *DocumentTokenSelect) sqlScan(ctx context.Context, v any) error {
-	aggregation := make([]string, 0, len(dts.fns))
-	for _, fn := range dts.fns {
-		aggregation = append(aggregation, fn(dts.sql))
-	}
-	switch n := len(*dts.selector.flds); {
-	case n == 0 && len(aggregation) > 0:
-		dts.sql.Select(aggregation...)
-	case n != 0 && len(aggregation) > 0:
-		dts.sql.AppendSelect(aggregation...)
-	}
-	rows := &sql.Rows{}
-	query, args := dts.sql.Query()
-	if err := dts.driver.Query(ctx, query, args, rows); err != nil {
-		return err
-	}
-	defer rows.Close()
-	return sql.ScanSlice(rows, v)
-}
diff --git a/backend/internal/data/ent/documenttoken_update.go b/backend/internal/data/ent/documenttoken_update.go
deleted file mode 100644
index 416bc08..0000000
--- a/backend/internal/data/ent/documenttoken_update.go
+++ /dev/null
@@ -1,542 +0,0 @@
-// Code generated by ent, DO NOT EDIT.
-
-package ent
-
-import (
-	"context"
-	"errors"
-	"fmt"
-	"time"
-
-	"entgo.io/ent/dialect/sql"
-	"entgo.io/ent/dialect/sql/sqlgraph"
-	"entgo.io/ent/schema/field"
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
-)
-
-// DocumentTokenUpdate is the builder for updating DocumentToken entities.
-type DocumentTokenUpdate struct {
-	config
-	hooks    []Hook
-	mutation *DocumentTokenMutation
-}
-
-// Where appends a list predicates to the DocumentTokenUpdate builder.
-func (dtu *DocumentTokenUpdate) Where(ps ...predicate.DocumentToken) *DocumentTokenUpdate {
-	dtu.mutation.Where(ps...)
-	return dtu
-}
-
-// SetUpdatedAt sets the "updated_at" field.
-func (dtu *DocumentTokenUpdate) SetUpdatedAt(t time.Time) *DocumentTokenUpdate {
-	dtu.mutation.SetUpdatedAt(t)
-	return dtu
-}
-
-// SetToken sets the "token" field.
-func (dtu *DocumentTokenUpdate) SetToken(b []byte) *DocumentTokenUpdate {
-	dtu.mutation.SetToken(b)
-	return dtu
-}
-
-// SetUses sets the "uses" field.
-func (dtu *DocumentTokenUpdate) SetUses(i int) *DocumentTokenUpdate {
-	dtu.mutation.ResetUses()
-	dtu.mutation.SetUses(i)
-	return dtu
-}
-
-// SetNillableUses sets the "uses" field if the given value is not nil.
-func (dtu *DocumentTokenUpdate) SetNillableUses(i *int) *DocumentTokenUpdate {
-	if i != nil {
-		dtu.SetUses(*i)
-	}
-	return dtu
-}
-
-// AddUses adds i to the "uses" field.
-func (dtu *DocumentTokenUpdate) AddUses(i int) *DocumentTokenUpdate {
-	dtu.mutation.AddUses(i)
-	return dtu
-}
-
-// SetExpiresAt sets the "expires_at" field.
-func (dtu *DocumentTokenUpdate) SetExpiresAt(t time.Time) *DocumentTokenUpdate {
-	dtu.mutation.SetExpiresAt(t)
-	return dtu
-}
-
-// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil.
-func (dtu *DocumentTokenUpdate) SetNillableExpiresAt(t *time.Time) *DocumentTokenUpdate {
-	if t != nil {
-		dtu.SetExpiresAt(*t)
-	}
-	return dtu
-}
-
-// SetDocumentID sets the "document" edge to the Document entity by ID.
-func (dtu *DocumentTokenUpdate) SetDocumentID(id uuid.UUID) *DocumentTokenUpdate {
-	dtu.mutation.SetDocumentID(id)
-	return dtu
-}
-
-// SetNillableDocumentID sets the "document" edge to the Document entity by ID if the given value is not nil.
-func (dtu *DocumentTokenUpdate) SetNillableDocumentID(id *uuid.UUID) *DocumentTokenUpdate {
-	if id != nil {
-		dtu = dtu.SetDocumentID(*id)
-	}
-	return dtu
-}
-
-// SetDocument sets the "document" edge to the Document entity.
-func (dtu *DocumentTokenUpdate) SetDocument(d *Document) *DocumentTokenUpdate {
-	return dtu.SetDocumentID(d.ID)
-}
-
-// Mutation returns the DocumentTokenMutation object of the builder.
-func (dtu *DocumentTokenUpdate) Mutation() *DocumentTokenMutation {
-	return dtu.mutation
-}
-
-// ClearDocument clears the "document" edge to the Document entity.
-func (dtu *DocumentTokenUpdate) ClearDocument() *DocumentTokenUpdate {
-	dtu.mutation.ClearDocument()
-	return dtu
-}
-
-// Save executes the query and returns the number of nodes affected by the update operation.
-func (dtu *DocumentTokenUpdate) Save(ctx context.Context) (int, error) {
-	var (
-		err      error
-		affected int
-	)
-	dtu.defaults()
-	if len(dtu.hooks) == 0 {
-		if err = dtu.check(); err != nil {
-			return 0, err
-		}
-		affected, err = dtu.sqlSave(ctx)
-	} else {
-		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
-			mutation, ok := m.(*DocumentTokenMutation)
-			if !ok {
-				return nil, fmt.Errorf("unexpected mutation type %T", m)
-			}
-			if err = dtu.check(); err != nil {
-				return 0, err
-			}
-			dtu.mutation = mutation
-			affected, err = dtu.sqlSave(ctx)
-			mutation.done = true
-			return affected, err
-		})
-		for i := len(dtu.hooks) - 1; i >= 0; i-- {
-			if dtu.hooks[i] == nil {
-				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
-			}
-			mut = dtu.hooks[i](mut)
-		}
-		if _, err := mut.Mutate(ctx, dtu.mutation); err != nil {
-			return 0, err
-		}
-	}
-	return affected, err
-}
-
-// SaveX is like Save, but panics if an error occurs.
-func (dtu *DocumentTokenUpdate) SaveX(ctx context.Context) int {
-	affected, err := dtu.Save(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return affected
-}
-
-// Exec executes the query.
-func (dtu *DocumentTokenUpdate) Exec(ctx context.Context) error {
-	_, err := dtu.Save(ctx)
-	return err
-}
-
-// ExecX is like Exec, but panics if an error occurs.
-func (dtu *DocumentTokenUpdate) ExecX(ctx context.Context) {
-	if err := dtu.Exec(ctx); err != nil {
-		panic(err)
-	}
-}
-
-// defaults sets the default values of the builder before save.
-func (dtu *DocumentTokenUpdate) defaults() {
-	if _, ok := dtu.mutation.UpdatedAt(); !ok {
-		v := documenttoken.UpdateDefaultUpdatedAt()
-		dtu.mutation.SetUpdatedAt(v)
-	}
-}
-
-// check runs all checks and user-defined validators on the builder.
-func (dtu *DocumentTokenUpdate) check() error {
-	if v, ok := dtu.mutation.Token(); ok {
-		if err := documenttoken.TokenValidator(v); err != nil {
-			return &ValidationError{Name: "token", err: fmt.Errorf(`ent: validator failed for field "DocumentToken.token": %w`, err)}
-		}
-	}
-	return nil
-}
-
-func (dtu *DocumentTokenUpdate) sqlSave(ctx context.Context) (n int, err error) {
-	_spec := &sqlgraph.UpdateSpec{
-		Node: &sqlgraph.NodeSpec{
-			Table:   documenttoken.Table,
-			Columns: documenttoken.Columns,
-			ID: &sqlgraph.FieldSpec{
-				Type:   field.TypeUUID,
-				Column: documenttoken.FieldID,
-			},
-		},
-	}
-	if ps := dtu.mutation.predicates; len(ps) > 0 {
-		_spec.Predicate = func(selector *sql.Selector) {
-			for i := range ps {
-				ps[i](selector)
-			}
-		}
-	}
-	if value, ok := dtu.mutation.UpdatedAt(); ok {
-		_spec.SetField(documenttoken.FieldUpdatedAt, field.TypeTime, value)
-	}
-	if value, ok := dtu.mutation.Token(); ok {
-		_spec.SetField(documenttoken.FieldToken, field.TypeBytes, value)
-	}
-	if value, ok := dtu.mutation.Uses(); ok {
-		_spec.SetField(documenttoken.FieldUses, field.TypeInt, value)
-	}
-	if value, ok := dtu.mutation.AddedUses(); ok {
-		_spec.AddField(documenttoken.FieldUses, field.TypeInt, value)
-	}
-	if value, ok := dtu.mutation.ExpiresAt(); ok {
-		_spec.SetField(documenttoken.FieldExpiresAt, field.TypeTime, value)
-	}
-	if dtu.mutation.DocumentCleared() {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.M2O,
-			Inverse: true,
-			Table:   documenttoken.DocumentTable,
-			Columns: []string{documenttoken.DocumentColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: document.FieldID,
-				},
-			},
-		}
-		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
-	}
-	if nodes := dtu.mutation.DocumentIDs(); len(nodes) > 0 {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.M2O,
-			Inverse: true,
-			Table:   documenttoken.DocumentTable,
-			Columns: []string{documenttoken.DocumentColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: document.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges.Add = append(_spec.Edges.Add, edge)
-	}
-	if n, err = sqlgraph.UpdateNodes(ctx, dtu.driver, _spec); err != nil {
-		if _, ok := err.(*sqlgraph.NotFoundError); ok {
-			err = &NotFoundError{documenttoken.Label}
-		} else if sqlgraph.IsConstraintError(err) {
-			err = &ConstraintError{msg: err.Error(), wrap: err}
-		}
-		return 0, err
-	}
-	return n, nil
-}
-
-// DocumentTokenUpdateOne is the builder for updating a single DocumentToken entity.
-type DocumentTokenUpdateOne struct {
-	config
-	fields   []string
-	hooks    []Hook
-	mutation *DocumentTokenMutation
-}
-
-// SetUpdatedAt sets the "updated_at" field.
-func (dtuo *DocumentTokenUpdateOne) SetUpdatedAt(t time.Time) *DocumentTokenUpdateOne {
-	dtuo.mutation.SetUpdatedAt(t)
-	return dtuo
-}
-
-// SetToken sets the "token" field.
-func (dtuo *DocumentTokenUpdateOne) SetToken(b []byte) *DocumentTokenUpdateOne {
-	dtuo.mutation.SetToken(b)
-	return dtuo
-}
-
-// SetUses sets the "uses" field.
-func (dtuo *DocumentTokenUpdateOne) SetUses(i int) *DocumentTokenUpdateOne {
-	dtuo.mutation.ResetUses()
-	dtuo.mutation.SetUses(i)
-	return dtuo
-}
-
-// SetNillableUses sets the "uses" field if the given value is not nil.
-func (dtuo *DocumentTokenUpdateOne) SetNillableUses(i *int) *DocumentTokenUpdateOne {
-	if i != nil {
-		dtuo.SetUses(*i)
-	}
-	return dtuo
-}
-
-// AddUses adds i to the "uses" field.
-func (dtuo *DocumentTokenUpdateOne) AddUses(i int) *DocumentTokenUpdateOne {
-	dtuo.mutation.AddUses(i)
-	return dtuo
-}
-
-// SetExpiresAt sets the "expires_at" field.
-func (dtuo *DocumentTokenUpdateOne) SetExpiresAt(t time.Time) *DocumentTokenUpdateOne {
-	dtuo.mutation.SetExpiresAt(t)
-	return dtuo
-}
-
-// SetNillableExpiresAt sets the "expires_at" field if the given value is not nil.
-func (dtuo *DocumentTokenUpdateOne) SetNillableExpiresAt(t *time.Time) *DocumentTokenUpdateOne {
-	if t != nil {
-		dtuo.SetExpiresAt(*t)
-	}
-	return dtuo
-}
-
-// SetDocumentID sets the "document" edge to the Document entity by ID.
-func (dtuo *DocumentTokenUpdateOne) SetDocumentID(id uuid.UUID) *DocumentTokenUpdateOne {
-	dtuo.mutation.SetDocumentID(id)
-	return dtuo
-}
-
-// SetNillableDocumentID sets the "document" edge to the Document entity by ID if the given value is not nil.
-func (dtuo *DocumentTokenUpdateOne) SetNillableDocumentID(id *uuid.UUID) *DocumentTokenUpdateOne {
-	if id != nil {
-		dtuo = dtuo.SetDocumentID(*id)
-	}
-	return dtuo
-}
-
-// SetDocument sets the "document" edge to the Document entity.
-func (dtuo *DocumentTokenUpdateOne) SetDocument(d *Document) *DocumentTokenUpdateOne {
-	return dtuo.SetDocumentID(d.ID)
-}
-
-// Mutation returns the DocumentTokenMutation object of the builder.
-func (dtuo *DocumentTokenUpdateOne) Mutation() *DocumentTokenMutation {
-	return dtuo.mutation
-}
-
-// ClearDocument clears the "document" edge to the Document entity.
-func (dtuo *DocumentTokenUpdateOne) ClearDocument() *DocumentTokenUpdateOne {
-	dtuo.mutation.ClearDocument()
-	return dtuo
-}
-
-// Select allows selecting one or more fields (columns) of the returned entity.
-// The default is selecting all fields defined in the entity schema.
-func (dtuo *DocumentTokenUpdateOne) Select(field string, fields ...string) *DocumentTokenUpdateOne {
-	dtuo.fields = append([]string{field}, fields...)
-	return dtuo
-}
-
-// Save executes the query and returns the updated DocumentToken entity.
-func (dtuo *DocumentTokenUpdateOne) Save(ctx context.Context) (*DocumentToken, error) {
-	var (
-		err  error
-		node *DocumentToken
-	)
-	dtuo.defaults()
-	if len(dtuo.hooks) == 0 {
-		if err = dtuo.check(); err != nil {
-			return nil, err
-		}
-		node, err = dtuo.sqlSave(ctx)
-	} else {
-		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
-			mutation, ok := m.(*DocumentTokenMutation)
-			if !ok {
-				return nil, fmt.Errorf("unexpected mutation type %T", m)
-			}
-			if err = dtuo.check(); err != nil {
-				return nil, err
-			}
-			dtuo.mutation = mutation
-			node, err = dtuo.sqlSave(ctx)
-			mutation.done = true
-			return node, err
-		})
-		for i := len(dtuo.hooks) - 1; i >= 0; i-- {
-			if dtuo.hooks[i] == nil {
-				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
-			}
-			mut = dtuo.hooks[i](mut)
-		}
-		v, err := mut.Mutate(ctx, dtuo.mutation)
-		if err != nil {
-			return nil, err
-		}
-		nv, ok := v.(*DocumentToken)
-		if !ok {
-			return nil, fmt.Errorf("unexpected node type %T returned from DocumentTokenMutation", v)
-		}
-		node = nv
-	}
-	return node, err
-}
-
-// SaveX is like Save, but panics if an error occurs.
-func (dtuo *DocumentTokenUpdateOne) SaveX(ctx context.Context) *DocumentToken {
-	node, err := dtuo.Save(ctx)
-	if err != nil {
-		panic(err)
-	}
-	return node
-}
-
-// Exec executes the query on the entity.
-func (dtuo *DocumentTokenUpdateOne) Exec(ctx context.Context) error {
-	_, err := dtuo.Save(ctx)
-	return err
-}
-
-// ExecX is like Exec, but panics if an error occurs.
-func (dtuo *DocumentTokenUpdateOne) ExecX(ctx context.Context) {
-	if err := dtuo.Exec(ctx); err != nil {
-		panic(err)
-	}
-}
-
-// defaults sets the default values of the builder before save.
-func (dtuo *DocumentTokenUpdateOne) defaults() {
-	if _, ok := dtuo.mutation.UpdatedAt(); !ok {
-		v := documenttoken.UpdateDefaultUpdatedAt()
-		dtuo.mutation.SetUpdatedAt(v)
-	}
-}
-
-// check runs all checks and user-defined validators on the builder.
-func (dtuo *DocumentTokenUpdateOne) check() error {
-	if v, ok := dtuo.mutation.Token(); ok {
-		if err := documenttoken.TokenValidator(v); err != nil {
-			return &ValidationError{Name: "token", err: fmt.Errorf(`ent: validator failed for field "DocumentToken.token": %w`, err)}
-		}
-	}
-	return nil
-}
-
-func (dtuo *DocumentTokenUpdateOne) sqlSave(ctx context.Context) (_node *DocumentToken, err error) {
-	_spec := &sqlgraph.UpdateSpec{
-		Node: &sqlgraph.NodeSpec{
-			Table:   documenttoken.Table,
-			Columns: documenttoken.Columns,
-			ID: &sqlgraph.FieldSpec{
-				Type:   field.TypeUUID,
-				Column: documenttoken.FieldID,
-			},
-		},
-	}
-	id, ok := dtuo.mutation.ID()
-	if !ok {
-		return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "DocumentToken.id" for update`)}
-	}
-	_spec.Node.ID.Value = id
-	if fields := dtuo.fields; len(fields) > 0 {
-		_spec.Node.Columns = make([]string, 0, len(fields))
-		_spec.Node.Columns = append(_spec.Node.Columns, documenttoken.FieldID)
-		for _, f := range fields {
-			if !documenttoken.ValidColumn(f) {
-				return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
-			}
-			if f != documenttoken.FieldID {
-				_spec.Node.Columns = append(_spec.Node.Columns, f)
-			}
-		}
-	}
-	if ps := dtuo.mutation.predicates; len(ps) > 0 {
-		_spec.Predicate = func(selector *sql.Selector) {
-			for i := range ps {
-				ps[i](selector)
-			}
-		}
-	}
-	if value, ok := dtuo.mutation.UpdatedAt(); ok {
-		_spec.SetField(documenttoken.FieldUpdatedAt, field.TypeTime, value)
-	}
-	if value, ok := dtuo.mutation.Token(); ok {
-		_spec.SetField(documenttoken.FieldToken, field.TypeBytes, value)
-	}
-	if value, ok := dtuo.mutation.Uses(); ok {
-		_spec.SetField(documenttoken.FieldUses, field.TypeInt, value)
-	}
-	if value, ok := dtuo.mutation.AddedUses(); ok {
-		_spec.AddField(documenttoken.FieldUses, field.TypeInt, value)
-	}
-	if value, ok := dtuo.mutation.ExpiresAt(); ok {
-		_spec.SetField(documenttoken.FieldExpiresAt, field.TypeTime, value)
-	}
-	if dtuo.mutation.DocumentCleared() {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.M2O,
-			Inverse: true,
-			Table:   documenttoken.DocumentTable,
-			Columns: []string{documenttoken.DocumentColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: document.FieldID,
-				},
-			},
-		}
-		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
-	}
-	if nodes := dtuo.mutation.DocumentIDs(); len(nodes) > 0 {
-		edge := &sqlgraph.EdgeSpec{
-			Rel:     sqlgraph.M2O,
-			Inverse: true,
-			Table:   documenttoken.DocumentTable,
-			Columns: []string{documenttoken.DocumentColumn},
-			Bidi:    false,
-			Target: &sqlgraph.EdgeTarget{
-				IDSpec: &sqlgraph.FieldSpec{
-					Type:   field.TypeUUID,
-					Column: document.FieldID,
-				},
-			},
-		}
-		for _, k := range nodes {
-			edge.Target.Nodes = append(edge.Target.Nodes, k)
-		}
-		_spec.Edges.Add = append(_spec.Edges.Add, edge)
-	}
-	_node = &DocumentToken{config: dtuo.config}
-	_spec.Assign = _node.assignValues
-	_spec.ScanValues = _node.scanValues
-	if err = sqlgraph.UpdateNode(ctx, dtuo.driver, _spec); err != nil {
-		if _, ok := err.(*sqlgraph.NotFoundError); ok {
-			err = &NotFoundError{documenttoken.Label}
-		} else if sqlgraph.IsConstraintError(err) {
-			err = &ConstraintError{msg: err.Error(), wrap: err}
-		}
-		return nil, err
-	}
-	return _node, nil
-}
diff --git a/backend/internal/data/ent/ent.go b/backend/internal/data/ent/ent.go
index 0731b14..adf8df3 100644
--- a/backend/internal/data/ent/ent.go
+++ b/backend/internal/data/ent/ent.go
@@ -14,13 +14,13 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/groupinvitationtoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/user"
 )
 
@@ -46,13 +46,13 @@ func columnChecker(table string) func(string) error {
 		authroles.Table:            authroles.ValidColumn,
 		authtokens.Table:           authtokens.ValidColumn,
 		document.Table:             document.ValidColumn,
-		documenttoken.Table:        documenttoken.ValidColumn,
 		group.Table:                group.ValidColumn,
 		groupinvitationtoken.Table: groupinvitationtoken.ValidColumn,
 		item.Table:                 item.ValidColumn,
 		itemfield.Table:            itemfield.ValidColumn,
 		label.Table:                label.ValidColumn,
 		location.Table:             location.ValidColumn,
+		maintenanceentry.Table:     maintenanceentry.ValidColumn,
 		user.Table:                 user.ValidColumn,
 	}
 	check, ok := checks[table]
diff --git a/backend/internal/data/ent/generate.go b/backend/internal/data/ent/generate.go
index eb03ded..7b8b727 100644
--- a/backend/internal/data/ent/generate.go
+++ b/backend/internal/data/ent/generate.go
@@ -1,3 +1,3 @@
 package ent
 
-//go:generate go run -mod=mod entgo.io/ent/cmd/ent generate --feature sql/versioned-migration ./schema
+//go:generate go run -mod=mod entgo.io/ent/cmd/ent generate --feature sql/versioned-migration ./schema --template=./schema/templates/has_id.tmpl
diff --git a/backend/internal/data/ent/has_id.go b/backend/internal/data/ent/has_id.go
index a6afc6a..875ba0d 100644
--- a/backend/internal/data/ent/has_id.go
+++ b/backend/internal/data/ent/has_id.go
@@ -8,6 +8,10 @@ func (a *Attachment) GetID() uuid.UUID {
 	return a.ID
 }
 
+func (ar *AuthRoles) GetID() int {
+	return ar.ID
+}
+
 func (at *AuthTokens) GetID() uuid.UUID {
 	return at.ID
 }
@@ -16,14 +20,14 @@ func (d *Document) GetID() uuid.UUID {
 	return d.ID
 }
 
-func (dt *DocumentToken) GetID() uuid.UUID {
-	return dt.ID
-}
-
 func (gr *Group) GetID() uuid.UUID {
 	return gr.ID
 }
 
+func (git *GroupInvitationToken) GetID() uuid.UUID {
+	return git.ID
+}
+
 func (i *Item) GetID() uuid.UUID {
 	return i.ID
 }
@@ -40,6 +44,10 @@ func (l *Location) GetID() uuid.UUID {
 	return l.ID
 }
 
+func (me *MaintenanceEntry) GetID() uuid.UUID {
+	return me.ID
+}
+
 func (u *User) GetID() uuid.UUID {
 	return u.ID
 }
diff --git a/backend/internal/data/ent/hook/hook.go b/backend/internal/data/ent/hook/hook.go
index 49d24c3..08c9401 100644
--- a/backend/internal/data/ent/hook/hook.go
+++ b/backend/internal/data/ent/hook/hook.go
@@ -61,19 +61,6 @@ func (f DocumentFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, er
 	return f(ctx, mv)
 }
 
-// The DocumentTokenFunc type is an adapter to allow the use of ordinary
-// function as DocumentToken mutator.
-type DocumentTokenFunc func(context.Context, *ent.DocumentTokenMutation) (ent.Value, error)
-
-// Mutate calls f(ctx, m).
-func (f DocumentTokenFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
-	mv, ok := m.(*ent.DocumentTokenMutation)
-	if !ok {
-		return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.DocumentTokenMutation", m)
-	}
-	return f(ctx, mv)
-}
-
 // The GroupFunc type is an adapter to allow the use of ordinary
 // function as Group mutator.
 type GroupFunc func(context.Context, *ent.GroupMutation) (ent.Value, error)
@@ -152,6 +139,19 @@ func (f LocationFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, er
 	return f(ctx, mv)
 }
 
+// The MaintenanceEntryFunc type is an adapter to allow the use of ordinary
+// function as MaintenanceEntry mutator.
+type MaintenanceEntryFunc func(context.Context, *ent.MaintenanceEntryMutation) (ent.Value, error)
+
+// Mutate calls f(ctx, m).
+func (f MaintenanceEntryFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
+	mv, ok := m.(*ent.MaintenanceEntryMutation)
+	if !ok {
+		return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.MaintenanceEntryMutation", m)
+	}
+	return f(ctx, mv)
+}
+
 // The UserFunc type is an adapter to allow the use of ordinary
 // function as User mutator.
 type UserFunc func(context.Context, *ent.UserMutation) (ent.Value, error)
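
A sketch of how the new MaintenanceEntryFunc adapter is typically wired into a hook chain. It assumes the regenerated client exposes the usual MaintenanceEntry client with a Use method, as ent generates for every entity; the helper itself is illustrative and not part of this patch.

package example

import (
	"context"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
	"github.com/hay-kot/homebox/backend/internal/data/ent/hook"
)

// registerMaintenanceHooks shows the standard ent hook pattern: the adapter
// converts an ordinary function into a Mutator for MaintenanceEntry mutations.
func registerMaintenanceHooks(client *ent.Client) {
	client.MaintenanceEntry.Use(func(next ent.Mutator) ent.Mutator {
		return hook.MaintenanceEntryFunc(func(ctx context.Context, m *ent.MaintenanceEntryMutation) (ent.Value, error) {
			// Validation or auditing would go here; then continue the chain.
			return next.Mutate(ctx, m)
		})
	})
}
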
diff --git a/backend/internal/data/ent/item.go b/backend/internal/data/ent/item.go
index a780945..bcca2c7 100644
--- a/backend/internal/data/ent/item.go
+++ b/backend/internal/data/ent/item.go
@@ -87,11 +87,13 @@ type ItemEdges struct {
 	Location *Location `json:"location,omitempty"`
 	// Fields holds the value of the fields edge.
 	Fields []*ItemField `json:"fields,omitempty"`
+	// MaintenanceEntries holds the value of the maintenance_entries edge.
+	MaintenanceEntries []*MaintenanceEntry `json:"maintenance_entries,omitempty"`
 	// Attachments holds the value of the attachments edge.
 	Attachments []*Attachment `json:"attachments,omitempty"`
 	// loadedTypes holds the information for reporting if a
 	// type was loaded (or requested) in eager-loading or not.
-	loadedTypes [7]bool
+	loadedTypes [8]bool
 }
 
 // ParentOrErr returns the Parent value or an error if the edge
@@ -160,10 +162,19 @@ func (e ItemEdges) FieldsOrErr() ([]*ItemField, error) {
 	return nil, &NotLoadedError{edge: "fields"}
 }
 
+// MaintenanceEntriesOrErr returns the MaintenanceEntries value or an error if the edge
+// was not loaded in eager-loading.
+func (e ItemEdges) MaintenanceEntriesOrErr() ([]*MaintenanceEntry, error) {
+	if e.loadedTypes[6] {
+		return e.MaintenanceEntries, nil
+	}
+	return nil, &NotLoadedError{edge: "maintenance_entries"}
+}
+
 // AttachmentsOrErr returns the Attachments value or an error if the edge
 // was not loaded in eager-loading.
 func (e ItemEdges) AttachmentsOrErr() ([]*Attachment, error) {
-	if e.loadedTypes[6] {
+	if e.loadedTypes[7] {
 		return e.Attachments, nil
 	}
 	return nil, &NotLoadedError{edge: "attachments"}
@@ -407,6 +418,11 @@ func (i *Item) QueryFields() *ItemFieldQuery {
 	return (&ItemClient{config: i.config}).QueryFields(i)
 }
 
+// QueryMaintenanceEntries queries the "maintenance_entries" edge of the Item entity.
+func (i *Item) QueryMaintenanceEntries() *MaintenanceEntryQuery {
+	return (&ItemClient{config: i.config}).QueryMaintenanceEntries(i)
+}
+
 // QueryAttachments queries the "attachments" edge of the Item entity.
 func (i *Item) QueryAttachments() *AttachmentQuery {
 	return (&ItemClient{config: i.config}).QueryAttachments(i)
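
A hedged read-path sketch showing why the new MaintenanceEntriesOrErr accessor exists: it lets callers tell an empty edge apart from one that was never eager-loaded. The query and accessor calls are the ones added in this patch; the helper function itself is illustrative only.

package example

import (
	"context"
	"fmt"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
)

// printMaintenanceCounts eager-loads the new edge and reads it back safely.
func printMaintenanceCounts(ctx context.Context, client *ent.Client) error {
	items, err := client.Item.Query().
		WithMaintenanceEntries().
		All(ctx)
	if err != nil {
		return err
	}
	for _, it := range items {
		entries, err := it.Edges.MaintenanceEntriesOrErr()
		if err != nil {
			// A *NotLoadedError here means WithMaintenanceEntries was not called.
			return err
		}
		fmt.Printf("%s: %d maintenance entries\n", it.ID, len(entries))
	}
	return nil
}
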
diff --git a/backend/internal/data/ent/item/item.go b/backend/internal/data/ent/item/item.go
index ab3b43f..2cb7f6d 100644
--- a/backend/internal/data/ent/item/item.go
+++ b/backend/internal/data/ent/item/item.go
@@ -71,6 +71,8 @@ const (
 	EdgeLocation = "location"
 	// EdgeFields holds the string denoting the fields edge name in mutations.
 	EdgeFields = "fields"
+	// EdgeMaintenanceEntries holds the string denoting the maintenance_entries edge name in mutations.
+	EdgeMaintenanceEntries = "maintenance_entries"
 	// EdgeAttachments holds the string denoting the attachments edge name in mutations.
 	EdgeAttachments = "attachments"
 	// Table holds the table name of the item in the database.
@@ -109,6 +111,13 @@ const (
 	FieldsInverseTable = "item_fields"
 	// FieldsColumn is the table column denoting the fields relation/edge.
 	FieldsColumn = "item_fields"
+	// MaintenanceEntriesTable is the table that holds the maintenance_entries relation/edge.
+	MaintenanceEntriesTable = "maintenance_entries"
+	// MaintenanceEntriesInverseTable is the table name for the MaintenanceEntry entity.
+	// It exists in this package in order to avoid circular dependency with the "maintenanceentry" package.
+	MaintenanceEntriesInverseTable = "maintenance_entries"
+	// MaintenanceEntriesColumn is the table column denoting the maintenance_entries relation/edge.
+	MaintenanceEntriesColumn = "item_id"
 	// AttachmentsTable is the table that holds the attachments relation/edge.
 	AttachmentsTable = "attachments"
 	// AttachmentsInverseTable is the table name for the Attachment entity.
diff --git a/backend/internal/data/ent/item/where.go b/backend/internal/data/ent/item/where.go
index 2174432..cef11f4 100644
--- a/backend/internal/data/ent/item/where.go
+++ b/backend/internal/data/ent/item/where.go
@@ -2300,6 +2300,34 @@ func HasFieldsWith(preds ...predicate.ItemField) predicate.Item {
 	})
 }
 
+// HasMaintenanceEntries applies the HasEdge predicate on the "maintenance_entries" edge.
+func HasMaintenanceEntries() predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(MaintenanceEntriesTable, FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, MaintenanceEntriesTable, MaintenanceEntriesColumn),
+		)
+		sqlgraph.HasNeighbors(s, step)
+	})
+}
+
+// HasMaintenanceEntriesWith applies the HasEdge predicate on the "maintenance_entries" edge with a given conditions (other predicates).
+func HasMaintenanceEntriesWith(preds ...predicate.MaintenanceEntry) predicate.Item {
+	return predicate.Item(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(MaintenanceEntriesInverseTable, FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, MaintenanceEntriesTable, MaintenanceEntriesColumn),
+		)
+		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+			for _, p := range preds {
+				p(s)
+			}
+		})
+	})
+}
+
 // HasAttachments applies the HasEdge predicate on the "attachments" edge.
 func HasAttachments() predicate.Item {
 	return predicate.Item(func(s *sql.Selector) {
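
The two new edge predicates compose like any other ent predicate; a short sketch follows. The maintenanceentry.CostGT helper is an assumption about the generated where.go for the new entity, which lives elsewhere in this patch.

package example

import (
	"context"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
)

// itemsWithAnyMaintenance uses the HasMaintenanceEntries predicate added above.
func itemsWithAnyMaintenance(ctx context.Context, client *ent.Client) ([]*ent.Item, error) {
	return client.Item.Query().
		Where(item.HasMaintenanceEntries()).
		All(ctx)
}

// itemsWithCostlyMaintenance narrows the edge with a nested predicate;
// maintenanceentry.CostGT is assumed to exist for the new entity's cost field.
func itemsWithCostlyMaintenance(ctx context.Context, client *ent.Client) ([]*ent.Item, error) {
	return client.Item.Query().
		Where(item.HasMaintenanceEntriesWith(maintenanceentry.CostGT(100))).
		All(ctx)
}
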
diff --git a/backend/internal/data/ent/item_create.go b/backend/internal/data/ent/item_create.go
index 97938f9..4a7c5aa 100644
--- a/backend/internal/data/ent/item_create.go
+++ b/backend/internal/data/ent/item_create.go
@@ -17,6 +17,7 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 )
 
 // ItemCreate is the builder for creating a Item entity.
@@ -448,6 +449,21 @@ func (ic *ItemCreate) AddFields(i ...*ItemField) *ItemCreate {
 	return ic.AddFieldIDs(ids...)
 }
 
+// AddMaintenanceEntryIDs adds the "maintenance_entries" edge to the MaintenanceEntry entity by IDs.
+func (ic *ItemCreate) AddMaintenanceEntryIDs(ids ...uuid.UUID) *ItemCreate {
+	ic.mutation.AddMaintenanceEntryIDs(ids...)
+	return ic
+}
+
+// AddMaintenanceEntries adds the "maintenance_entries" edges to the MaintenanceEntry entity.
+func (ic *ItemCreate) AddMaintenanceEntries(m ...*MaintenanceEntry) *ItemCreate {
+	ids := make([]uuid.UUID, len(m))
+	for i := range m {
+		ids[i] = m[i].ID
+	}
+	return ic.AddMaintenanceEntryIDs(ids...)
+}
+
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
 func (ic *ItemCreate) AddAttachmentIDs(ids ...uuid.UUID) *ItemCreate {
 	ic.mutation.AddAttachmentIDs(ids...)
@@ -907,6 +923,25 @@ func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
 		}
 		_spec.Edges = append(_spec.Edges, edge)
 	}
+	if nodes := ic.mutation.MaintenanceEntriesIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges = append(_spec.Edges, edge)
+	}
 	if nodes := ic.mutation.AttachmentsIDs(); len(nodes) > 0 {
 		edge := &sqlgraph.EdgeSpec{
 			Rel:     sqlgraph.O2M,
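
A minimal sketch of the new AddMaintenanceEntryIDs builder method on ItemCreate. SetName and the Item's remaining required fields are assumptions about the existing Item schema and are omitted here for brevity.

package example

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/ent"
)

// createItemWithLog attaches previously created maintenance entries to a new
// item by ID; real callers would also set the item's group, location, etc.
func createItemWithLog(ctx context.Context, client *ent.Client, entryIDs []uuid.UUID) (*ent.Item, error) {
	return client.Item.Create().
		SetName("Dishwasher").
		AddMaintenanceEntryIDs(entryIDs...).
		Save(ctx)
}
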
diff --git a/backend/internal/data/ent/item_query.go b/backend/internal/data/ent/item_query.go
index 0040a42..8891469 100644
--- a/backend/internal/data/ent/item_query.go
+++ b/backend/internal/data/ent/item_query.go
@@ -18,26 +18,28 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
 )
 
 // ItemQuery is the builder for querying Item entities.
 type ItemQuery struct {
 	config
-	limit           *int
-	offset          *int
-	unique          *bool
-	order           []OrderFunc
-	fields          []string
-	predicates      []predicate.Item
-	withParent      *ItemQuery
-	withChildren    *ItemQuery
-	withGroup       *GroupQuery
-	withLabel       *LabelQuery
-	withLocation    *LocationQuery
-	withFields      *ItemFieldQuery
-	withAttachments *AttachmentQuery
-	withFKs         bool
+	limit                  *int
+	offset                 *int
+	unique                 *bool
+	order                  []OrderFunc
+	fields                 []string
+	predicates             []predicate.Item
+	withParent             *ItemQuery
+	withChildren           *ItemQuery
+	withGroup              *GroupQuery
+	withLabel              *LabelQuery
+	withLocation           *LocationQuery
+	withFields             *ItemFieldQuery
+	withMaintenanceEntries *MaintenanceEntryQuery
+	withAttachments        *AttachmentQuery
+	withFKs                bool
 	// intermediate query (i.e. traversal path).
 	sql  *sql.Selector
 	path func(context.Context) (*sql.Selector, error)
@@ -206,6 +208,28 @@ func (iq *ItemQuery) QueryFields() *ItemFieldQuery {
 	return query
 }
 
+// QueryMaintenanceEntries chains the current query on the "maintenance_entries" edge.
+func (iq *ItemQuery) QueryMaintenanceEntries() *MaintenanceEntryQuery {
+	query := &MaintenanceEntryQuery{config: iq.config}
+	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+		if err := iq.prepareQuery(ctx); err != nil {
+			return nil, err
+		}
+		selector := iq.sqlQuery(ctx)
+		if err := selector.Err(); err != nil {
+			return nil, err
+		}
+		step := sqlgraph.NewStep(
+			sqlgraph.From(item.Table, item.FieldID, selector),
+			sqlgraph.To(maintenanceentry.Table, maintenanceentry.FieldID),
+			sqlgraph.Edge(sqlgraph.O2M, false, item.MaintenanceEntriesTable, item.MaintenanceEntriesColumn),
+		)
+		fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step)
+		return fromU, nil
+	}
+	return query
+}
+
 // QueryAttachments chains the current query on the "attachments" edge.
 func (iq *ItemQuery) QueryAttachments() *AttachmentQuery {
 	query := &AttachmentQuery{config: iq.config}
@@ -404,18 +428,19 @@ func (iq *ItemQuery) Clone() *ItemQuery {
 		return nil
 	}
 	return &ItemQuery{
-		config:          iq.config,
-		limit:           iq.limit,
-		offset:          iq.offset,
-		order:           append([]OrderFunc{}, iq.order...),
-		predicates:      append([]predicate.Item{}, iq.predicates...),
-		withParent:      iq.withParent.Clone(),
-		withChildren:    iq.withChildren.Clone(),
-		withGroup:       iq.withGroup.Clone(),
-		withLabel:       iq.withLabel.Clone(),
-		withLocation:    iq.withLocation.Clone(),
-		withFields:      iq.withFields.Clone(),
-		withAttachments: iq.withAttachments.Clone(),
+		config:                 iq.config,
+		limit:                  iq.limit,
+		offset:                 iq.offset,
+		order:                  append([]OrderFunc{}, iq.order...),
+		predicates:             append([]predicate.Item{}, iq.predicates...),
+		withParent:             iq.withParent.Clone(),
+		withChildren:           iq.withChildren.Clone(),
+		withGroup:              iq.withGroup.Clone(),
+		withLabel:              iq.withLabel.Clone(),
+		withLocation:           iq.withLocation.Clone(),
+		withFields:             iq.withFields.Clone(),
+		withMaintenanceEntries: iq.withMaintenanceEntries.Clone(),
+		withAttachments:        iq.withAttachments.Clone(),
 		// clone intermediate query.
 		sql:    iq.sql.Clone(),
 		path:   iq.path,
@@ -489,6 +514,17 @@ func (iq *ItemQuery) WithFields(opts ...func(*ItemFieldQuery)) *ItemQuery {
 	return iq
 }
 
+// WithMaintenanceEntries tells the query-builder to eager-load the nodes that are connected to
+// the "maintenance_entries" edge. The optional arguments are used to configure the query builder of the edge.
+func (iq *ItemQuery) WithMaintenanceEntries(opts ...func(*MaintenanceEntryQuery)) *ItemQuery {
+	query := &MaintenanceEntryQuery{config: iq.config}
+	for _, opt := range opts {
+		opt(query)
+	}
+	iq.withMaintenanceEntries = query
+	return iq
+}
+
 // WithAttachments tells the query-builder to eager-load the nodes that are connected to
 // the "attachments" edge. The optional arguments are used to configure the query builder of the edge.
 func (iq *ItemQuery) WithAttachments(opts ...func(*AttachmentQuery)) *ItemQuery {
@@ -574,13 +610,14 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
 		nodes       = []*Item{}
 		withFKs     = iq.withFKs
 		_spec       = iq.querySpec()
-		loadedTypes = [7]bool{
+		loadedTypes = [8]bool{
 			iq.withParent != nil,
 			iq.withChildren != nil,
 			iq.withGroup != nil,
 			iq.withLabel != nil,
 			iq.withLocation != nil,
 			iq.withFields != nil,
+			iq.withMaintenanceEntries != nil,
 			iq.withAttachments != nil,
 		}
 	)
@@ -647,6 +684,13 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
 			return nil, err
 		}
 	}
+	if query := iq.withMaintenanceEntries; query != nil {
+		if err := iq.loadMaintenanceEntries(ctx, query, nodes,
+			func(n *Item) { n.Edges.MaintenanceEntries = []*MaintenanceEntry{} },
+			func(n *Item, e *MaintenanceEntry) { n.Edges.MaintenanceEntries = append(n.Edges.MaintenanceEntries, e) }); err != nil {
+			return nil, err
+		}
+	}
 	if query := iq.withAttachments; query != nil {
 		if err := iq.loadAttachments(ctx, query, nodes,
 			func(n *Item) { n.Edges.Attachments = []*Attachment{} },
@@ -864,6 +908,33 @@ func (iq *ItemQuery) loadFields(ctx context.Context, query *ItemFieldQuery, node
 	}
 	return nil
 }
+func (iq *ItemQuery) loadMaintenanceEntries(ctx context.Context, query *MaintenanceEntryQuery, nodes []*Item, init func(*Item), assign func(*Item, *MaintenanceEntry)) error {
+	fks := make([]driver.Value, 0, len(nodes))
+	nodeids := make(map[uuid.UUID]*Item)
+	for i := range nodes {
+		fks = append(fks, nodes[i].ID)
+		nodeids[nodes[i].ID] = nodes[i]
+		if init != nil {
+			init(nodes[i])
+		}
+	}
+	query.Where(predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.InValues(item.MaintenanceEntriesColumn, fks...))
+	}))
+	neighbors, err := query.All(ctx)
+	if err != nil {
+		return err
+	}
+	for _, n := range neighbors {
+		fk := n.ItemID
+		node, ok := nodeids[fk]
+		if !ok {
+			return fmt.Errorf(`unexpected foreign-key "item_id" returned %v for node %v`, fk, n.ID)
+		}
+		assign(node, n)
+	}
+	return nil
+}
 func (iq *ItemQuery) loadAttachments(ctx context.Context, query *AttachmentQuery, nodes []*Item, init func(*Item), assign func(*Item, *Attachment)) error {
 	fks := make([]driver.Value, 0, len(nodes))
 	nodeids := make(map[uuid.UUID]*Item)
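A minimal usage sketch of the new WithMaintenanceEntries eager-loading hook added in item_query.go above. The client and ctx values are assumed to come from the application's existing ent setup and are not part of this patch; the helper below only exercises APIs introduced or already generated here.

package example

import (
	"context"
	"fmt"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
)

// listItemMaintenance eager-loads the new "maintenance_entries" edge so the
// entries are available on Item.Edges without one extra query per item.
func listItemMaintenance(ctx context.Context, client *ent.Client) error {
	items, err := client.Item.Query().
		WithMaintenanceEntries(func(q *ent.MaintenanceEntryQuery) {
			q.Order(ent.Desc(maintenanceentry.FieldDate)) // newest entries first
		}).
		All(ctx)
	if err != nil {
		return err
	}
	for _, it := range items {
		// Populated because of WithMaintenanceEntries above.
		for _, entry := range it.Edges.MaintenanceEntries {
			fmt.Printf("%s: %s (%.2f)\n", it.Name, entry.Name, entry.Cost)
		}
	}
	return nil
}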
diff --git a/backend/internal/data/ent/item_update.go b/backend/internal/data/ent/item_update.go
index 236f363..b7a9b79 100644
--- a/backend/internal/data/ent/item_update.go
+++ b/backend/internal/data/ent/item_update.go
@@ -18,6 +18,7 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
 )
 
@@ -506,6 +507,21 @@ func (iu *ItemUpdate) AddFields(i ...*ItemField) *ItemUpdate {
 	return iu.AddFieldIDs(ids...)
 }
 
+// AddMaintenanceEntryIDs adds the "maintenance_entries" edge to the MaintenanceEntry entity by IDs.
+func (iu *ItemUpdate) AddMaintenanceEntryIDs(ids ...uuid.UUID) *ItemUpdate {
+	iu.mutation.AddMaintenanceEntryIDs(ids...)
+	return iu
+}
+
+// AddMaintenanceEntries adds the "maintenance_entries" edges to the MaintenanceEntry entity.
+func (iu *ItemUpdate) AddMaintenanceEntries(m ...*MaintenanceEntry) *ItemUpdate {
+	ids := make([]uuid.UUID, len(m))
+	for i := range m {
+		ids[i] = m[i].ID
+	}
+	return iu.AddMaintenanceEntryIDs(ids...)
+}
+
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
 func (iu *ItemUpdate) AddAttachmentIDs(ids ...uuid.UUID) *ItemUpdate {
 	iu.mutation.AddAttachmentIDs(ids...)
@@ -607,6 +623,27 @@ func (iu *ItemUpdate) RemoveFields(i ...*ItemField) *ItemUpdate {
 	return iu.RemoveFieldIDs(ids...)
 }
 
+// ClearMaintenanceEntries clears all "maintenance_entries" edges to the MaintenanceEntry entity.
+func (iu *ItemUpdate) ClearMaintenanceEntries() *ItemUpdate {
+	iu.mutation.ClearMaintenanceEntries()
+	return iu
+}
+
+// RemoveMaintenanceEntryIDs removes the "maintenance_entries" edge to MaintenanceEntry entities by IDs.
+func (iu *ItemUpdate) RemoveMaintenanceEntryIDs(ids ...uuid.UUID) *ItemUpdate {
+	iu.mutation.RemoveMaintenanceEntryIDs(ids...)
+	return iu
+}
+
+// RemoveMaintenanceEntries removes "maintenance_entries" edges to MaintenanceEntry entities.
+func (iu *ItemUpdate) RemoveMaintenanceEntries(m ...*MaintenanceEntry) *ItemUpdate {
+	ids := make([]uuid.UUID, len(m))
+	for i := range m {
+		ids[i] = m[i].ID
+	}
+	return iu.RemoveMaintenanceEntryIDs(ids...)
+}
+
 // ClearAttachments clears all "attachments" edges to the Attachment entity.
 func (iu *ItemUpdate) ClearAttachments() *ItemUpdate {
 	iu.mutation.ClearAttachments()
@@ -1144,6 +1181,60 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
 		}
 		_spec.Edges.Add = append(_spec.Edges.Add, edge)
 	}
+	if iu.mutation.MaintenanceEntriesCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := iu.mutation.RemovedMaintenanceEntriesIDs(); len(nodes) > 0 && !iu.mutation.MaintenanceEntriesCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := iu.mutation.MaintenanceEntriesIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Add = append(_spec.Edges.Add, edge)
+	}
 	if iu.mutation.AttachmentsCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel:     sqlgraph.O2M,
@@ -1689,6 +1780,21 @@ func (iuo *ItemUpdateOne) AddFields(i ...*ItemField) *ItemUpdateOne {
 	return iuo.AddFieldIDs(ids...)
 }
 
+// AddMaintenanceEntryIDs adds the "maintenance_entries" edge to the MaintenanceEntry entity by IDs.
+func (iuo *ItemUpdateOne) AddMaintenanceEntryIDs(ids ...uuid.UUID) *ItemUpdateOne {
+	iuo.mutation.AddMaintenanceEntryIDs(ids...)
+	return iuo
+}
+
+// AddMaintenanceEntries adds the "maintenance_entries" edges to the MaintenanceEntry entity.
+func (iuo *ItemUpdateOne) AddMaintenanceEntries(m ...*MaintenanceEntry) *ItemUpdateOne {
+	ids := make([]uuid.UUID, len(m))
+	for i := range m {
+		ids[i] = m[i].ID
+	}
+	return iuo.AddMaintenanceEntryIDs(ids...)
+}
+
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by IDs.
 func (iuo *ItemUpdateOne) AddAttachmentIDs(ids ...uuid.UUID) *ItemUpdateOne {
 	iuo.mutation.AddAttachmentIDs(ids...)
@@ -1790,6 +1896,27 @@ func (iuo *ItemUpdateOne) RemoveFields(i ...*ItemField) *ItemUpdateOne {
 	return iuo.RemoveFieldIDs(ids...)
 }
 
+// ClearMaintenanceEntries clears all "maintenance_entries" edges to the MaintenanceEntry entity.
+func (iuo *ItemUpdateOne) ClearMaintenanceEntries() *ItemUpdateOne {
+	iuo.mutation.ClearMaintenanceEntries()
+	return iuo
+}
+
+// RemoveMaintenanceEntryIDs removes the "maintenance_entries" edge to MaintenanceEntry entities by IDs.
+func (iuo *ItemUpdateOne) RemoveMaintenanceEntryIDs(ids ...uuid.UUID) *ItemUpdateOne {
+	iuo.mutation.RemoveMaintenanceEntryIDs(ids...)
+	return iuo
+}
+
+// RemoveMaintenanceEntries removes "maintenance_entries" edges to MaintenanceEntry entities.
+func (iuo *ItemUpdateOne) RemoveMaintenanceEntries(m ...*MaintenanceEntry) *ItemUpdateOne {
+	ids := make([]uuid.UUID, len(m))
+	for i := range m {
+		ids[i] = m[i].ID
+	}
+	return iuo.RemoveMaintenanceEntryIDs(ids...)
+}
+
 // ClearAttachments clears all "attachments" edges to the Attachment entity.
 func (iuo *ItemUpdateOne) ClearAttachments() *ItemUpdateOne {
 	iuo.mutation.ClearAttachments()
@@ -2357,6 +2484,60 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
 		}
 		_spec.Edges.Add = append(_spec.Edges.Add, edge)
 	}
+	if iuo.mutation.MaintenanceEntriesCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := iuo.mutation.RemovedMaintenanceEntriesIDs(); len(nodes) > 0 && !iuo.mutation.MaintenanceEntriesCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := iuo.mutation.MaintenanceEntriesIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.O2M,
+			Inverse: false,
+			Table:   item.MaintenanceEntriesTable,
+			Columns: []string{item.MaintenanceEntriesColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: maintenanceentry.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Add = append(_spec.Edges.Add, edge)
+	}
 	if iuo.mutation.AttachmentsCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel:     sqlgraph.O2M,
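A short sketch of the corresponding ItemUpdate edge helpers. It reuses the client/ctx assumptions from the earlier sketch, plus github.com/google/uuid; itemID and entryID are placeholder values supplied by the caller.

// attachEntry links an existing MaintenanceEntry to an Item via the new edge.
func attachEntry(ctx context.Context, client *ent.Client, itemID, entryID uuid.UUID) error {
	return client.Item.UpdateOneID(itemID).
		AddMaintenanceEntryIDs(entryID).
		Exec(ctx)
}

// detachAll removes every maintenance-entry edge from the item.
func detachAll(ctx context.Context, client *ent.Client, itemID uuid.UUID) error {
	return client.Item.UpdateOneID(itemID).
		ClearMaintenanceEntries().
		Exec(ctx)
}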
diff --git a/backend/internal/data/ent/maintenanceentry.go b/backend/internal/data/ent/maintenanceentry.go
new file mode 100644
index 0000000..4d0b078
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry.go
@@ -0,0 +1,202 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+	"fmt"
+	"strings"
+	"time"
+
+	"entgo.io/ent/dialect/sql"
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
+)
+
+// MaintenanceEntry is the model entity for the MaintenanceEntry schema.
+type MaintenanceEntry struct {
+	config `json:"-"`
+	// ID of the ent.
+	ID uuid.UUID `json:"id,omitempty"`
+	// CreatedAt holds the value of the "created_at" field.
+	CreatedAt time.Time `json:"created_at,omitempty"`
+	// UpdatedAt holds the value of the "updated_at" field.
+	UpdatedAt time.Time `json:"updated_at,omitempty"`
+	// ItemID holds the value of the "item_id" field.
+	ItemID uuid.UUID `json:"item_id,omitempty"`
+	// Date holds the value of the "date" field.
+	Date time.Time `json:"date,omitempty"`
+	// Name holds the value of the "name" field.
+	Name string `json:"name,omitempty"`
+	// Description holds the value of the "description" field.
+	Description string `json:"description,omitempty"`
+	// Cost holds the value of the "cost" field.
+	Cost float64 `json:"cost,omitempty"`
+	// Edges holds the relations/edges for other nodes in the graph.
+	// The values are being populated by the MaintenanceEntryQuery when eager-loading is set.
+	Edges MaintenanceEntryEdges `json:"edges"`
+}
+
+// MaintenanceEntryEdges holds the relations/edges for other nodes in the graph.
+type MaintenanceEntryEdges struct {
+	// Item holds the value of the item edge.
+	Item *Item `json:"item,omitempty"`
+	// loadedTypes holds the information for reporting if a
+	// type was loaded (or requested) in eager-loading or not.
+	loadedTypes [1]bool
+}
+
+// ItemOrErr returns the Item value or an error if the edge
+// was not loaded in eager-loading, or loaded but was not found.
+func (e MaintenanceEntryEdges) ItemOrErr() (*Item, error) {
+	if e.loadedTypes[0] {
+		if e.Item == nil {
+			// Edge was loaded but was not found.
+			return nil, &NotFoundError{label: item.Label}
+		}
+		return e.Item, nil
+	}
+	return nil, &NotLoadedError{edge: "item"}
+}
+
+// scanValues returns the types for scanning values from sql.Rows.
+func (*MaintenanceEntry) scanValues(columns []string) ([]any, error) {
+	values := make([]any, len(columns))
+	for i := range columns {
+		switch columns[i] {
+		case maintenanceentry.FieldCost:
+			values[i] = new(sql.NullFloat64)
+		case maintenanceentry.FieldName, maintenanceentry.FieldDescription:
+			values[i] = new(sql.NullString)
+		case maintenanceentry.FieldCreatedAt, maintenanceentry.FieldUpdatedAt, maintenanceentry.FieldDate:
+			values[i] = new(sql.NullTime)
+		case maintenanceentry.FieldID, maintenanceentry.FieldItemID:
+			values[i] = new(uuid.UUID)
+		default:
+			return nil, fmt.Errorf("unexpected column %q for type MaintenanceEntry", columns[i])
+		}
+	}
+	return values, nil
+}
+
+// assignValues assigns the values that were returned from sql.Rows (after scanning)
+// to the MaintenanceEntry fields.
+func (me *MaintenanceEntry) assignValues(columns []string, values []any) error {
+	if m, n := len(values), len(columns); m < n {
+		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
+	}
+	for i := range columns {
+		switch columns[i] {
+		case maintenanceentry.FieldID:
+			if value, ok := values[i].(*uuid.UUID); !ok {
+				return fmt.Errorf("unexpected type %T for field id", values[i])
+			} else if value != nil {
+				me.ID = *value
+			}
+		case maintenanceentry.FieldCreatedAt:
+			if value, ok := values[i].(*sql.NullTime); !ok {
+				return fmt.Errorf("unexpected type %T for field created_at", values[i])
+			} else if value.Valid {
+				me.CreatedAt = value.Time
+			}
+		case maintenanceentry.FieldUpdatedAt:
+			if value, ok := values[i].(*sql.NullTime); !ok {
+				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
+			} else if value.Valid {
+				me.UpdatedAt = value.Time
+			}
+		case maintenanceentry.FieldItemID:
+			if value, ok := values[i].(*uuid.UUID); !ok {
+				return fmt.Errorf("unexpected type %T for field item_id", values[i])
+			} else if value != nil {
+				me.ItemID = *value
+			}
+		case maintenanceentry.FieldDate:
+			if value, ok := values[i].(*sql.NullTime); !ok {
+				return fmt.Errorf("unexpected type %T for field date", values[i])
+			} else if value.Valid {
+				me.Date = value.Time
+			}
+		case maintenanceentry.FieldName:
+			if value, ok := values[i].(*sql.NullString); !ok {
+				return fmt.Errorf("unexpected type %T for field name", values[i])
+			} else if value.Valid {
+				me.Name = value.String
+			}
+		case maintenanceentry.FieldDescription:
+			if value, ok := values[i].(*sql.NullString); !ok {
+				return fmt.Errorf("unexpected type %T for field description", values[i])
+			} else if value.Valid {
+				me.Description = value.String
+			}
+		case maintenanceentry.FieldCost:
+			if value, ok := values[i].(*sql.NullFloat64); !ok {
+				return fmt.Errorf("unexpected type %T for field cost", values[i])
+			} else if value.Valid {
+				me.Cost = value.Float64
+			}
+		}
+	}
+	return nil
+}
+
+// QueryItem queries the "item" edge of the MaintenanceEntry entity.
+func (me *MaintenanceEntry) QueryItem() *ItemQuery {
+	return (&MaintenanceEntryClient{config: me.config}).QueryItem(me)
+}
+
+// Update returns a builder for updating this MaintenanceEntry.
+// Note that you need to call MaintenanceEntry.Unwrap() before calling this method if this MaintenanceEntry
+// was returned from a transaction, and the transaction was committed or rolled back.
+func (me *MaintenanceEntry) Update() *MaintenanceEntryUpdateOne {
+	return (&MaintenanceEntryClient{config: me.config}).UpdateOne(me)
+}
+
+// Unwrap unwraps the MaintenanceEntry entity that was returned from a transaction after it was closed,
+// so that all future queries will be executed through the driver which created the transaction.
+func (me *MaintenanceEntry) Unwrap() *MaintenanceEntry {
+	_tx, ok := me.config.driver.(*txDriver)
+	if !ok {
+		panic("ent: MaintenanceEntry is not a transactional entity")
+	}
+	me.config.driver = _tx.drv
+	return me
+}
+
+// String implements the fmt.Stringer.
+func (me *MaintenanceEntry) String() string {
+	var builder strings.Builder
+	builder.WriteString("MaintenanceEntry(")
+	builder.WriteString(fmt.Sprintf("id=%v, ", me.ID))
+	builder.WriteString("created_at=")
+	builder.WriteString(me.CreatedAt.Format(time.ANSIC))
+	builder.WriteString(", ")
+	builder.WriteString("updated_at=")
+	builder.WriteString(me.UpdatedAt.Format(time.ANSIC))
+	builder.WriteString(", ")
+	builder.WriteString("item_id=")
+	builder.WriteString(fmt.Sprintf("%v", me.ItemID))
+	builder.WriteString(", ")
+	builder.WriteString("date=")
+	builder.WriteString(me.Date.Format(time.ANSIC))
+	builder.WriteString(", ")
+	builder.WriteString("name=")
+	builder.WriteString(me.Name)
+	builder.WriteString(", ")
+	builder.WriteString("description=")
+	builder.WriteString(me.Description)
+	builder.WriteString(", ")
+	builder.WriteString("cost=")
+	builder.WriteString(fmt.Sprintf("%v", me.Cost))
+	builder.WriteByte(')')
+	return builder.String()
+}
+
+// MaintenanceEntries is a parsable slice of MaintenanceEntry.
+type MaintenanceEntries []*MaintenanceEntry
+
+func (me MaintenanceEntries) config(cfg config) {
+	for _i := range me {
+		me[_i].config = cfg
+	}
+}
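The generated entity also exposes edge accessors. A hedged sketch of reading the most recent entry and walking back to its item, under the same client/ctx assumptions; the MaintenanceEntry accessor on *ent.Client follows ent's standard generation and is assumed here.

// latestEntryItem fetches the most recent maintenance entry and resolves its item.
func latestEntryItem(ctx context.Context, client *ent.Client) (*ent.Item, error) {
	entry, err := client.MaintenanceEntry.Query().
		Order(ent.Desc(maintenanceentry.FieldDate)).
		First(ctx)
	if err != nil {
		return nil, err
	}
	// QueryItem follows the "item" edge defined above.
	return entry.QueryItem().Only(ctx)
}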
diff --git a/backend/internal/data/ent/maintenanceentry/maintenanceentry.go b/backend/internal/data/ent/maintenanceentry/maintenanceentry.go
new file mode 100644
index 0000000..c1dcffc
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry/maintenanceentry.go
@@ -0,0 +1,82 @@
+// Code generated by ent, DO NOT EDIT.
+
+package maintenanceentry
+
+import (
+	"time"
+
+	"github.com/google/uuid"
+)
+
+const (
+	// Label holds the string label denoting the maintenanceentry type in the database.
+	Label = "maintenance_entry"
+	// FieldID holds the string denoting the id field in the database.
+	FieldID = "id"
+	// FieldCreatedAt holds the string denoting the created_at field in the database.
+	FieldCreatedAt = "created_at"
+	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
+	FieldUpdatedAt = "updated_at"
+	// FieldItemID holds the string denoting the item_id field in the database.
+	FieldItemID = "item_id"
+	// FieldDate holds the string denoting the date field in the database.
+	FieldDate = "date"
+	// FieldName holds the string denoting the name field in the database.
+	FieldName = "name"
+	// FieldDescription holds the string denoting the description field in the database.
+	FieldDescription = "description"
+	// FieldCost holds the string denoting the cost field in the database.
+	FieldCost = "cost"
+	// EdgeItem holds the string denoting the item edge name in mutations.
+	EdgeItem = "item"
+	// Table holds the table name of the maintenanceentry in the database.
+	Table = "maintenance_entries"
+	// ItemTable is the table that holds the item relation/edge.
+	ItemTable = "maintenance_entries"
+	// ItemInverseTable is the table name for the Item entity.
+	// It exists in this package in order to avoid circular dependency with the "item" package.
+	ItemInverseTable = "items"
+	// ItemColumn is the table column denoting the item relation/edge.
+	ItemColumn = "item_id"
+)
+
+// Columns holds all SQL columns for maintenanceentry fields.
+var Columns = []string{
+	FieldID,
+	FieldCreatedAt,
+	FieldUpdatedAt,
+	FieldItemID,
+	FieldDate,
+	FieldName,
+	FieldDescription,
+	FieldCost,
+}
+
+// ValidColumn reports if the column name is valid (part of the table columns).
+func ValidColumn(column string) bool {
+	for i := range Columns {
+		if column == Columns[i] {
+			return true
+		}
+	}
+	return false
+}
+
+var (
+	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
+	DefaultCreatedAt func() time.Time
+	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
+	DefaultUpdatedAt func() time.Time
+	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
+	UpdateDefaultUpdatedAt func() time.Time
+	// DefaultDate holds the default value on creation for the "date" field.
+	DefaultDate func() time.Time
+	// NameValidator is a validator for the "name" field. It is called by the builders before save.
+	NameValidator func(string) error
+	// DescriptionValidator is a validator for the "description" field. It is called by the builders before save.
+	DescriptionValidator func(string) error
+	// DefaultCost holds the default value on creation for the "cost" field.
+	DefaultCost float64
+	// DefaultID holds the default value on creation for the "id" field.
+	DefaultID func() uuid.UUID
+)
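The field constants above can be combined with the generated query builder for ordering and single-column projections. A sketch under the same assumptions as the earlier examples:

// recentCosts returns entry costs ordered by date, selecting a single column.
func recentCosts(ctx context.Context, client *ent.Client) ([]float64, error) {
	return client.MaintenanceEntry.Query().
		Order(ent.Asc(maintenanceentry.FieldDate)).
		Select(maintenanceentry.FieldCost).
		Float64s(ctx)
}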
diff --git a/backend/internal/data/ent/maintenanceentry/where.go b/backend/internal/data/ent/maintenanceentry/where.go
new file mode 100644
index 0000000..02d9633
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry/where.go
@@ -0,0 +1,696 @@
+// Code generated by ent, DO NOT EDIT.
+
+package maintenanceentry
+
+import (
+	"time"
+
+	"entgo.io/ent/dialect/sql"
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
+)
+
+// ID filters vertices based on their ID field.
+func ID(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldID), id))
+	})
+}
+
+// IDEQ applies the EQ predicate on the ID field.
+func IDEQ(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldID), id))
+	})
+}
+
+// IDNEQ applies the NEQ predicate on the ID field.
+func IDNEQ(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldID), id))
+	})
+}
+
+// IDIn applies the In predicate on the ID field.
+func IDIn(ids ...uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		v := make([]any, len(ids))
+		for i := range v {
+			v[i] = ids[i]
+		}
+		s.Where(sql.In(s.C(FieldID), v...))
+	})
+}
+
+// IDNotIn applies the NotIn predicate on the ID field.
+func IDNotIn(ids ...uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		v := make([]any, len(ids))
+		for i := range v {
+			v[i] = ids[i]
+		}
+		s.Where(sql.NotIn(s.C(FieldID), v...))
+	})
+}
+
+// IDGT applies the GT predicate on the ID field.
+func IDGT(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldID), id))
+	})
+}
+
+// IDGTE applies the GTE predicate on the ID field.
+func IDGTE(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldID), id))
+	})
+}
+
+// IDLT applies the LT predicate on the ID field.
+func IDLT(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldID), id))
+	})
+}
+
+// IDLTE applies the LTE predicate on the ID field.
+func IDLTE(id uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldID), id))
+	})
+}
+
+// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
+func CreatedAt(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
+	})
+}
+
+// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
+func UpdatedAt(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// ItemID applies equality check predicate on the "item_id" field. It's identical to ItemIDEQ.
+func ItemID(v uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldItemID), v))
+	})
+}
+
+// Date applies equality check predicate on the "date" field. It's identical to DateEQ.
+func Date(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldDate), v))
+	})
+}
+
+// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
+func Name(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldName), v))
+	})
+}
+
+// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ.
+func Description(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldDescription), v))
+	})
+}
+
+// Cost applies equality check predicate on the "cost" field. It's identical to CostEQ.
+func Cost(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldCost), v))
+	})
+}
+
+// CreatedAtEQ applies the EQ predicate on the "created_at" field.
+func CreatedAtEQ(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldCreatedAt), v))
+	})
+}
+
+// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
+func CreatedAtNEQ(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
+	})
+}
+
+// CreatedAtIn applies the In predicate on the "created_at" field.
+func CreatedAtIn(vs ...time.Time) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldCreatedAt), v...))
+	})
+}
+
+// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
+func CreatedAtNotIn(vs ...time.Time) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
+	})
+}
+
+// CreatedAtGT applies the GT predicate on the "created_at" field.
+func CreatedAtGT(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldCreatedAt), v))
+	})
+}
+
+// CreatedAtGTE applies the GTE predicate on the "created_at" field.
+func CreatedAtGTE(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldCreatedAt), v))
+	})
+}
+
+// CreatedAtLT applies the LT predicate on the "created_at" field.
+func CreatedAtLT(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldCreatedAt), v))
+	})
+}
+
+// CreatedAtLTE applies the LTE predicate on the "created_at" field.
+func CreatedAtLTE(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldCreatedAt), v))
+	})
+}
+
+// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
+func UpdatedAtEQ(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
+func UpdatedAtNEQ(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// UpdatedAtIn applies the In predicate on the "updated_at" field.
+func UpdatedAtIn(vs ...time.Time) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldUpdatedAt), v...))
+	})
+}
+
+// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
+func UpdatedAtNotIn(vs ...time.Time) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
+	})
+}
+
+// UpdatedAtGT applies the GT predicate on the "updated_at" field.
+func UpdatedAtGT(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
+func UpdatedAtGTE(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// UpdatedAtLT applies the LT predicate on the "updated_at" field.
+func UpdatedAtLT(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
+func UpdatedAtLTE(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
+	})
+}
+
+// ItemIDEQ applies the EQ predicate on the "item_id" field.
+func ItemIDEQ(v uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldItemID), v))
+	})
+}
+
+// ItemIDNEQ applies the NEQ predicate on the "item_id" field.
+func ItemIDNEQ(v uuid.UUID) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldItemID), v))
+	})
+}
+
+// ItemIDIn applies the In predicate on the "item_id" field.
+func ItemIDIn(vs ...uuid.UUID) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldItemID), v...))
+	})
+}
+
+// ItemIDNotIn applies the NotIn predicate on the "item_id" field.
+func ItemIDNotIn(vs ...uuid.UUID) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldItemID), v...))
+	})
+}
+
+// DateEQ applies the EQ predicate on the "date" field.
+func DateEQ(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldDate), v))
+	})
+}
+
+// DateNEQ applies the NEQ predicate on the "date" field.
+func DateNEQ(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldDate), v))
+	})
+}
+
+// DateIn applies the In predicate on the "date" field.
+func DateIn(vs ...time.Time) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldDate), v...))
+	})
+}
+
+// DateNotIn applies the NotIn predicate on the "date" field.
+func DateNotIn(vs ...time.Time) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldDate), v...))
+	})
+}
+
+// DateGT applies the GT predicate on the "date" field.
+func DateGT(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldDate), v))
+	})
+}
+
+// DateGTE applies the GTE predicate on the "date" field.
+func DateGTE(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldDate), v))
+	})
+}
+
+// DateLT applies the LT predicate on the "date" field.
+func DateLT(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldDate), v))
+	})
+}
+
+// DateLTE applies the LTE predicate on the "date" field.
+func DateLTE(v time.Time) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldDate), v))
+	})
+}
+
+// NameEQ applies the EQ predicate on the "name" field.
+func NameEQ(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldName), v))
+	})
+}
+
+// NameNEQ applies the NEQ predicate on the "name" field.
+func NameNEQ(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldName), v))
+	})
+}
+
+// NameIn applies the In predicate on the "name" field.
+func NameIn(vs ...string) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldName), v...))
+	})
+}
+
+// NameNotIn applies the NotIn predicate on the "name" field.
+func NameNotIn(vs ...string) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldName), v...))
+	})
+}
+
+// NameGT applies the GT predicate on the "name" field.
+func NameGT(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldName), v))
+	})
+}
+
+// NameGTE applies the GTE predicate on the "name" field.
+func NameGTE(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldName), v))
+	})
+}
+
+// NameLT applies the LT predicate on the "name" field.
+func NameLT(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldName), v))
+	})
+}
+
+// NameLTE applies the LTE predicate on the "name" field.
+func NameLTE(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldName), v))
+	})
+}
+
+// NameContains applies the Contains predicate on the "name" field.
+func NameContains(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.Contains(s.C(FieldName), v))
+	})
+}
+
+// NameHasPrefix applies the HasPrefix predicate on the "name" field.
+func NameHasPrefix(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.HasPrefix(s.C(FieldName), v))
+	})
+}
+
+// NameHasSuffix applies the HasSuffix predicate on the "name" field.
+func NameHasSuffix(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.HasSuffix(s.C(FieldName), v))
+	})
+}
+
+// NameEqualFold applies the EqualFold predicate on the "name" field.
+func NameEqualFold(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EqualFold(s.C(FieldName), v))
+	})
+}
+
+// NameContainsFold applies the ContainsFold predicate on the "name" field.
+func NameContainsFold(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.ContainsFold(s.C(FieldName), v))
+	})
+}
+
+// DescriptionEQ applies the EQ predicate on the "description" field.
+func DescriptionEQ(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionNEQ applies the NEQ predicate on the "description" field.
+func DescriptionNEQ(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionIn applies the In predicate on the "description" field.
+func DescriptionIn(vs ...string) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldDescription), v...))
+	})
+}
+
+// DescriptionNotIn applies the NotIn predicate on the "description" field.
+func DescriptionNotIn(vs ...string) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldDescription), v...))
+	})
+}
+
+// DescriptionGT applies the GT predicate on the "description" field.
+func DescriptionGT(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionGTE applies the GTE predicate on the "description" field.
+func DescriptionGTE(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionLT applies the LT predicate on the "description" field.
+func DescriptionLT(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionLTE applies the LTE predicate on the "description" field.
+func DescriptionLTE(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionContains applies the Contains predicate on the "description" field.
+func DescriptionContains(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.Contains(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field.
+func DescriptionHasPrefix(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.HasPrefix(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field.
+func DescriptionHasSuffix(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.HasSuffix(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionIsNil applies the IsNil predicate on the "description" field.
+func DescriptionIsNil() predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.IsNull(s.C(FieldDescription)))
+	})
+}
+
+// DescriptionNotNil applies the NotNil predicate on the "description" field.
+func DescriptionNotNil() predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotNull(s.C(FieldDescription)))
+	})
+}
+
+// DescriptionEqualFold applies the EqualFold predicate on the "description" field.
+func DescriptionEqualFold(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EqualFold(s.C(FieldDescription), v))
+	})
+}
+
+// DescriptionContainsFold applies the ContainsFold predicate on the "description" field.
+func DescriptionContainsFold(v string) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.ContainsFold(s.C(FieldDescription), v))
+	})
+}
+
+// CostEQ applies the EQ predicate on the "cost" field.
+func CostEQ(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.EQ(s.C(FieldCost), v))
+	})
+}
+
+// CostNEQ applies the NEQ predicate on the "cost" field.
+func CostNEQ(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NEQ(s.C(FieldCost), v))
+	})
+}
+
+// CostIn applies the In predicate on the "cost" field.
+func CostIn(vs ...float64) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.In(s.C(FieldCost), v...))
+	})
+}
+
+// CostNotIn applies the NotIn predicate on the "cost" field.
+func CostNotIn(vs ...float64) predicate.MaintenanceEntry {
+	v := make([]any, len(vs))
+	for i := range v {
+		v[i] = vs[i]
+	}
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.NotIn(s.C(FieldCost), v...))
+	})
+}
+
+// CostGT applies the GT predicate on the "cost" field.
+func CostGT(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GT(s.C(FieldCost), v))
+	})
+}
+
+// CostGTE applies the GTE predicate on the "cost" field.
+func CostGTE(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.GTE(s.C(FieldCost), v))
+	})
+}
+
+// CostLT applies the LT predicate on the "cost" field.
+func CostLT(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LT(s.C(FieldCost), v))
+	})
+}
+
+// CostLTE applies the LTE predicate on the "cost" field.
+func CostLTE(v float64) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s.Where(sql.LTE(s.C(FieldCost), v))
+	})
+}
+
+// HasItem applies the HasEdge predicate on the "item" edge.
+func HasItem() predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(ItemTable, FieldID),
+			sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
+		)
+		sqlgraph.HasNeighbors(s, step)
+	})
+}
+
+// HasItemWith applies the HasEdge predicate on the "item" edge with given conditions (other predicates).
+func HasItemWith(preds ...predicate.Item) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		step := sqlgraph.NewStep(
+			sqlgraph.From(Table, FieldID),
+			sqlgraph.To(ItemInverseTable, FieldID),
+			sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
+		)
+		sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+			for _, p := range preds {
+				p(s)
+			}
+		})
+	})
+}
+
+// And groups predicates with the AND operator between them.
+func And(predicates ...predicate.MaintenanceEntry) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s1 := s.Clone().SetP(nil)
+		for _, p := range predicates {
+			p(s1)
+		}
+		s.Where(s1.P())
+	})
+}
+
+// Or groups predicates with the OR operator between them.
+func Or(predicates ...predicate.MaintenanceEntry) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		s1 := s.Clone().SetP(nil)
+		for i, p := range predicates {
+			if i > 0 {
+				s1.Or()
+			}
+			p(s1)
+		}
+		s.Where(s1.P())
+	})
+}
+
+// Not applies the not operator on the given predicate.
+func Not(p predicate.MaintenanceEntry) predicate.MaintenanceEntry {
+	return predicate.MaintenanceEntry(func(s *sql.Selector) {
+		p(s.Not())
+	})
+}
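The predicates above compose in the usual ent fashion. For example, filtering an item's entries by date and cost; in addition to the earlier imports this assumes the generated item package and the standard time package, and itemID/since are placeholders.

// entriesSince lists an item's maintenance entries on or after a given date
// that recorded a non-zero cost.
func entriesSince(ctx context.Context, client *ent.Client, itemID uuid.UUID, since time.Time) ([]*ent.MaintenanceEntry, error) {
	return client.MaintenanceEntry.Query().
		Where(
			maintenanceentry.HasItemWith(item.ID(itemID)),
			maintenanceentry.DateGTE(since),
			maintenanceentry.CostGT(0),
		).
		All(ctx)
}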
diff --git a/backend/internal/data/ent/maintenanceentry_create.go b/backend/internal/data/ent/maintenanceentry_create.go
new file mode 100644
index 0000000..3abaa84
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry_create.go
@@ -0,0 +1,419 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"time"
+
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"entgo.io/ent/schema/field"
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
+)
+
+// MaintenanceEntryCreate is the builder for creating a MaintenanceEntry entity.
+type MaintenanceEntryCreate struct {
+	config
+	mutation *MaintenanceEntryMutation
+	hooks    []Hook
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (mec *MaintenanceEntryCreate) SetCreatedAt(t time.Time) *MaintenanceEntryCreate {
+	mec.mutation.SetCreatedAt(t)
+	return mec
+}
+
+// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
+func (mec *MaintenanceEntryCreate) SetNillableCreatedAt(t *time.Time) *MaintenanceEntryCreate {
+	if t != nil {
+		mec.SetCreatedAt(*t)
+	}
+	return mec
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (mec *MaintenanceEntryCreate) SetUpdatedAt(t time.Time) *MaintenanceEntryCreate {
+	mec.mutation.SetUpdatedAt(t)
+	return mec
+}
+
+// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
+func (mec *MaintenanceEntryCreate) SetNillableUpdatedAt(t *time.Time) *MaintenanceEntryCreate {
+	if t != nil {
+		mec.SetUpdatedAt(*t)
+	}
+	return mec
+}
+
+// SetItemID sets the "item_id" field.
+func (mec *MaintenanceEntryCreate) SetItemID(u uuid.UUID) *MaintenanceEntryCreate {
+	mec.mutation.SetItemID(u)
+	return mec
+}
+
+// SetDate sets the "date" field.
+func (mec *MaintenanceEntryCreate) SetDate(t time.Time) *MaintenanceEntryCreate {
+	mec.mutation.SetDate(t)
+	return mec
+}
+
+// SetNillableDate sets the "date" field if the given value is not nil.
+func (mec *MaintenanceEntryCreate) SetNillableDate(t *time.Time) *MaintenanceEntryCreate {
+	if t != nil {
+		mec.SetDate(*t)
+	}
+	return mec
+}
+
+// SetName sets the "name" field.
+func (mec *MaintenanceEntryCreate) SetName(s string) *MaintenanceEntryCreate {
+	mec.mutation.SetName(s)
+	return mec
+}
+
+// SetDescription sets the "description" field.
+func (mec *MaintenanceEntryCreate) SetDescription(s string) *MaintenanceEntryCreate {
+	mec.mutation.SetDescription(s)
+	return mec
+}
+
+// SetNillableDescription sets the "description" field if the given value is not nil.
+func (mec *MaintenanceEntryCreate) SetNillableDescription(s *string) *MaintenanceEntryCreate {
+	if s != nil {
+		mec.SetDescription(*s)
+	}
+	return mec
+}
+
+// SetCost sets the "cost" field.
+func (mec *MaintenanceEntryCreate) SetCost(f float64) *MaintenanceEntryCreate {
+	mec.mutation.SetCost(f)
+	return mec
+}
+
+// SetNillableCost sets the "cost" field if the given value is not nil.
+func (mec *MaintenanceEntryCreate) SetNillableCost(f *float64) *MaintenanceEntryCreate {
+	if f != nil {
+		mec.SetCost(*f)
+	}
+	return mec
+}
+
+// SetID sets the "id" field.
+func (mec *MaintenanceEntryCreate) SetID(u uuid.UUID) *MaintenanceEntryCreate {
+	mec.mutation.SetID(u)
+	return mec
+}
+
+// SetNillableID sets the "id" field if the given value is not nil.
+func (mec *MaintenanceEntryCreate) SetNillableID(u *uuid.UUID) *MaintenanceEntryCreate {
+	if u != nil {
+		mec.SetID(*u)
+	}
+	return mec
+}
+
+// SetItem sets the "item" edge to the Item entity.
+func (mec *MaintenanceEntryCreate) SetItem(i *Item) *MaintenanceEntryCreate {
+	return mec.SetItemID(i.ID)
+}
+
+// Mutation returns the MaintenanceEntryMutation object of the builder.
+func (mec *MaintenanceEntryCreate) Mutation() *MaintenanceEntryMutation {
+	return mec.mutation
+}
+
+// Save creates the MaintenanceEntry in the database.
+func (mec *MaintenanceEntryCreate) Save(ctx context.Context) (*MaintenanceEntry, error) {
+	var (
+		err  error
+		node *MaintenanceEntry
+	)
+	mec.defaults()
+	if len(mec.hooks) == 0 {
+		if err = mec.check(); err != nil {
+			return nil, err
+		}
+		node, err = mec.sqlSave(ctx)
+	} else {
+		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+			mutation, ok := m.(*MaintenanceEntryMutation)
+			if !ok {
+				return nil, fmt.Errorf("unexpected mutation type %T", m)
+			}
+			if err = mec.check(); err != nil {
+				return nil, err
+			}
+			mec.mutation = mutation
+			if node, err = mec.sqlSave(ctx); err != nil {
+				return nil, err
+			}
+			mutation.id = &node.ID
+			mutation.done = true
+			return node, err
+		})
+		for i := len(mec.hooks) - 1; i >= 0; i-- {
+			if mec.hooks[i] == nil {
+				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
+			}
+			mut = mec.hooks[i](mut)
+		}
+		v, err := mut.Mutate(ctx, mec.mutation)
+		if err != nil {
+			return nil, err
+		}
+		nv, ok := v.(*MaintenanceEntry)
+		if !ok {
+			return nil, fmt.Errorf("unexpected node type %T returned from MaintenanceEntryMutation", v)
+		}
+		node = nv
+	}
+	return node, err
+}
+
+// SaveX calls Save and panics if Save returns an error.
+func (mec *MaintenanceEntryCreate) SaveX(ctx context.Context) *MaintenanceEntry {
+	v, err := mec.Save(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return v
+}
+
+// Exec executes the query.
+func (mec *MaintenanceEntryCreate) Exec(ctx context.Context) error {
+	_, err := mec.Save(ctx)
+	return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (mec *MaintenanceEntryCreate) ExecX(ctx context.Context) {
+	if err := mec.Exec(ctx); err != nil {
+		panic(err)
+	}
+}
+
+// defaults sets the default values of the builder before save.
+func (mec *MaintenanceEntryCreate) defaults() {
+	if _, ok := mec.mutation.CreatedAt(); !ok {
+		v := maintenanceentry.DefaultCreatedAt()
+		mec.mutation.SetCreatedAt(v)
+	}
+	if _, ok := mec.mutation.UpdatedAt(); !ok {
+		v := maintenanceentry.DefaultUpdatedAt()
+		mec.mutation.SetUpdatedAt(v)
+	}
+	if _, ok := mec.mutation.Date(); !ok {
+		v := maintenanceentry.DefaultDate()
+		mec.mutation.SetDate(v)
+	}
+	if _, ok := mec.mutation.Cost(); !ok {
+		v := maintenanceentry.DefaultCost
+		mec.mutation.SetCost(v)
+	}
+	if _, ok := mec.mutation.ID(); !ok {
+		v := maintenanceentry.DefaultID()
+		mec.mutation.SetID(v)
+	}
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (mec *MaintenanceEntryCreate) check() error {
+	if _, ok := mec.mutation.CreatedAt(); !ok {
+		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "MaintenanceEntry.created_at"`)}
+	}
+	if _, ok := mec.mutation.UpdatedAt(); !ok {
+		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "MaintenanceEntry.updated_at"`)}
+	}
+	if _, ok := mec.mutation.ItemID(); !ok {
+		return &ValidationError{Name: "item_id", err: errors.New(`ent: missing required field "MaintenanceEntry.item_id"`)}
+	}
+	if _, ok := mec.mutation.Date(); !ok {
+		return &ValidationError{Name: "date", err: errors.New(`ent: missing required field "MaintenanceEntry.date"`)}
+	}
+	if _, ok := mec.mutation.Name(); !ok {
+		return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "MaintenanceEntry.name"`)}
+	}
+	if v, ok := mec.mutation.Name(); ok {
+		if err := maintenanceentry.NameValidator(v); err != nil {
+			return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "MaintenanceEntry.name": %w`, err)}
+		}
+	}
+	if v, ok := mec.mutation.Description(); ok {
+		if err := maintenanceentry.DescriptionValidator(v); err != nil {
+			return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "MaintenanceEntry.description": %w`, err)}
+		}
+	}
+	if _, ok := mec.mutation.Cost(); !ok {
+		return &ValidationError{Name: "cost", err: errors.New(`ent: missing required field "MaintenanceEntry.cost"`)}
+	}
+	if _, ok := mec.mutation.ItemID(); !ok {
+		return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "MaintenanceEntry.item"`)}
+	}
+	return nil
+}
+
+func (mec *MaintenanceEntryCreate) sqlSave(ctx context.Context) (*MaintenanceEntry, error) {
+	_node, _spec := mec.createSpec()
+	if err := sqlgraph.CreateNode(ctx, mec.driver, _spec); err != nil {
+		if sqlgraph.IsConstraintError(err) {
+			err = &ConstraintError{msg: err.Error(), wrap: err}
+		}
+		return nil, err
+	}
+	if _spec.ID.Value != nil {
+		if id, ok := _spec.ID.Value.(*uuid.UUID); ok {
+			_node.ID = *id
+		} else if err := _node.ID.Scan(_spec.ID.Value); err != nil {
+			return nil, err
+		}
+	}
+	return _node, nil
+}
+
+func (mec *MaintenanceEntryCreate) createSpec() (*MaintenanceEntry, *sqlgraph.CreateSpec) {
+	var (
+		_node = &MaintenanceEntry{config: mec.config}
+		_spec = &sqlgraph.CreateSpec{
+			Table: maintenanceentry.Table,
+			ID: &sqlgraph.FieldSpec{
+				Type:   field.TypeUUID,
+				Column: maintenanceentry.FieldID,
+			},
+		}
+	)
+	if id, ok := mec.mutation.ID(); ok {
+		_node.ID = id
+		_spec.ID.Value = &id
+	}
+	if value, ok := mec.mutation.CreatedAt(); ok {
+		_spec.SetField(maintenanceentry.FieldCreatedAt, field.TypeTime, value)
+		_node.CreatedAt = value
+	}
+	if value, ok := mec.mutation.UpdatedAt(); ok {
+		_spec.SetField(maintenanceentry.FieldUpdatedAt, field.TypeTime, value)
+		_node.UpdatedAt = value
+	}
+	if value, ok := mec.mutation.Date(); ok {
+		_spec.SetField(maintenanceentry.FieldDate, field.TypeTime, value)
+		_node.Date = value
+	}
+	if value, ok := mec.mutation.Name(); ok {
+		_spec.SetField(maintenanceentry.FieldName, field.TypeString, value)
+		_node.Name = value
+	}
+	if value, ok := mec.mutation.Description(); ok {
+		_spec.SetField(maintenanceentry.FieldDescription, field.TypeString, value)
+		_node.Description = value
+	}
+	if value, ok := mec.mutation.Cost(); ok {
+		_spec.SetField(maintenanceentry.FieldCost, field.TypeFloat64, value)
+		_node.Cost = value
+	}
+	if nodes := mec.mutation.ItemIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.M2O,
+			Inverse: true,
+			Table:   maintenanceentry.ItemTable,
+			Columns: []string{maintenanceentry.ItemColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: item.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_node.ItemID = nodes[0]
+		_spec.Edges = append(_spec.Edges, edge)
+	}
+	return _node, _spec
+}
+
+// MaintenanceEntryCreateBulk is the builder for creating many MaintenanceEntry entities in bulk.
+type MaintenanceEntryCreateBulk struct {
+	config
+	builders []*MaintenanceEntryCreate
+}
+
+// Save creates the MaintenanceEntry entities in the database.
+func (mecb *MaintenanceEntryCreateBulk) Save(ctx context.Context) ([]*MaintenanceEntry, error) {
+	specs := make([]*sqlgraph.CreateSpec, len(mecb.builders))
+	nodes := make([]*MaintenanceEntry, len(mecb.builders))
+	mutators := make([]Mutator, len(mecb.builders))
+	for i := range mecb.builders {
+		func(i int, root context.Context) {
+			builder := mecb.builders[i]
+			builder.defaults()
+			var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+				mutation, ok := m.(*MaintenanceEntryMutation)
+				if !ok {
+					return nil, fmt.Errorf("unexpected mutation type %T", m)
+				}
+				if err := builder.check(); err != nil {
+					return nil, err
+				}
+				builder.mutation = mutation
+				nodes[i], specs[i] = builder.createSpec()
+				var err error
+				if i < len(mutators)-1 {
+					_, err = mutators[i+1].Mutate(root, mecb.builders[i+1].mutation)
+				} else {
+					spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
+					// Invoke the actual operation on the latest mutation in the chain.
+					if err = sqlgraph.BatchCreate(ctx, mecb.driver, spec); err != nil {
+						if sqlgraph.IsConstraintError(err) {
+							err = &ConstraintError{msg: err.Error(), wrap: err}
+						}
+					}
+				}
+				if err != nil {
+					return nil, err
+				}
+				mutation.id = &nodes[i].ID
+				mutation.done = true
+				return nodes[i], nil
+			})
+			for i := len(builder.hooks) - 1; i >= 0; i-- {
+				mut = builder.hooks[i](mut)
+			}
+			mutators[i] = mut
+		}(i, ctx)
+	}
+	if len(mutators) > 0 {
+		if _, err := mutators[0].Mutate(ctx, mecb.builders[0].mutation); err != nil {
+			return nil, err
+		}
+	}
+	return nodes, nil
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (mecb *MaintenanceEntryCreateBulk) SaveX(ctx context.Context) []*MaintenanceEntry {
+	v, err := mecb.Save(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return v
+}
+
+// Exec executes the query.
+func (mecb *MaintenanceEntryCreateBulk) Exec(ctx context.Context) error {
+	_, err := mecb.Save(ctx)
+	return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (mecb *MaintenanceEntryCreateBulk) ExecX(ctx context.Context) {
+	if err := mecb.Exec(ctx); err != nil {
+		panic(err)
+	}
+}
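How the create builder above might be driven from the repository layer. Names and values are illustrative only; the Create accessor follows ent's standard client generation and the same client/ctx assumptions apply.

// logMaintenance records a maintenance entry against an item.
func logMaintenance(ctx context.Context, client *ent.Client, itemID uuid.UUID) (*ent.MaintenanceEntry, error) {
	return client.MaintenanceEntry.Create().
		SetItemID(itemID).
		SetName("Replaced air filter").
		SetDescription("Swapped in a new HEPA filter").
		SetDate(time.Now()).
		SetCost(12.50).
		Save(ctx)
}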
diff --git a/backend/internal/data/ent/maintenanceentry_delete.go b/backend/internal/data/ent/maintenanceentry_delete.go
new file mode 100644
index 0000000..ea0ed2a
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry_delete.go
@@ -0,0 +1,115 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+	"context"
+	"fmt"
+
+	"entgo.io/ent/dialect/sql"
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"entgo.io/ent/schema/field"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
+)
+
+// MaintenanceEntryDelete is the builder for deleting a MaintenanceEntry entity.
+type MaintenanceEntryDelete struct {
+	config
+	hooks    []Hook
+	mutation *MaintenanceEntryMutation
+}
+
+// Where appends a list of predicates to the MaintenanceEntryDelete builder.
+func (med *MaintenanceEntryDelete) Where(ps ...predicate.MaintenanceEntry) *MaintenanceEntryDelete {
+	med.mutation.Where(ps...)
+	return med
+}
+
+// Exec executes the deletion query and returns how many vertices were deleted.
+func (med *MaintenanceEntryDelete) Exec(ctx context.Context) (int, error) {
+	var (
+		err      error
+		affected int
+	)
+	if len(med.hooks) == 0 {
+		affected, err = med.sqlExec(ctx)
+	} else {
+		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+			mutation, ok := m.(*MaintenanceEntryMutation)
+			if !ok {
+				return nil, fmt.Errorf("unexpected mutation type %T", m)
+			}
+			med.mutation = mutation
+			affected, err = med.sqlExec(ctx)
+			mutation.done = true
+			return affected, err
+		})
+		for i := len(med.hooks) - 1; i >= 0; i-- {
+			if med.hooks[i] == nil {
+				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
+			}
+			mut = med.hooks[i](mut)
+		}
+		if _, err := mut.Mutate(ctx, med.mutation); err != nil {
+			return 0, err
+		}
+	}
+	return affected, err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (med *MaintenanceEntryDelete) ExecX(ctx context.Context) int {
+	n, err := med.Exec(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return n
+}
+
+func (med *MaintenanceEntryDelete) sqlExec(ctx context.Context) (int, error) {
+	_spec := &sqlgraph.DeleteSpec{
+		Node: &sqlgraph.NodeSpec{
+			Table: maintenanceentry.Table,
+			ID: &sqlgraph.FieldSpec{
+				Type:   field.TypeUUID,
+				Column: maintenanceentry.FieldID,
+			},
+		},
+	}
+	if ps := med.mutation.predicates; len(ps) > 0 {
+		_spec.Predicate = func(selector *sql.Selector) {
+			for i := range ps {
+				ps[i](selector)
+			}
+		}
+	}
+	affected, err := sqlgraph.DeleteNodes(ctx, med.driver, _spec)
+	if err != nil && sqlgraph.IsConstraintError(err) {
+		err = &ConstraintError{msg: err.Error(), wrap: err}
+	}
+	return affected, err
+}
+
+// MaintenanceEntryDeleteOne is the builder for deleting a single MaintenanceEntry entity.
+type MaintenanceEntryDeleteOne struct {
+	med *MaintenanceEntryDelete
+}
+
+// Exec executes the deletion query.
+func (medo *MaintenanceEntryDeleteOne) Exec(ctx context.Context) error {
+	n, err := medo.med.Exec(ctx)
+	switch {
+	case err != nil:
+		return err
+	case n == 0:
+		return &NotFoundError{maintenanceentry.Label}
+	default:
+		return nil
+	}
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (medo *MaintenanceEntryDeleteOne) ExecX(ctx context.Context) {
+	medo.med.ExecX(ctx)
+}
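
The delete builder composes the same predicate package; a sketch assuming the standard field predicates ent generates for item_id and cost (maintenanceentry.ItemID, maintenanceentry.CostLTE), with the zero-cost filter chosen only for illustration:

    // purgeFreeEntries removes an item's maintenance entries that recorded no
    // cost and reports how many rows were deleted.
    func purgeFreeEntries(ctx context.Context, client *ent.Client, itemID uuid.UUID) (int, error) {
        return client.MaintenanceEntry.Delete().
            Where(
                maintenanceentry.ItemID(itemID),
                maintenanceentry.CostLTE(0),
            ).
            Exec(ctx)
    }

MaintenanceEntryDeleteOne is the single-row variant; its Exec returns a *NotFoundError when no matching entry exists, as implemented above.
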
diff --git a/backend/internal/data/ent/maintenanceentry_query.go b/backend/internal/data/ent/maintenanceentry_query.go
new file mode 100644
index 0000000..bcc95b5
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry_query.go
@@ -0,0 +1,622 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+	"context"
+	"fmt"
+	"math"
+
+	"entgo.io/ent/dialect/sql"
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"entgo.io/ent/schema/field"
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
+)
+
+// MaintenanceEntryQuery is the builder for querying MaintenanceEntry entities.
+type MaintenanceEntryQuery struct {
+	config
+	limit      *int
+	offset     *int
+	unique     *bool
+	order      []OrderFunc
+	fields     []string
+	predicates []predicate.MaintenanceEntry
+	withItem   *ItemQuery
+	// intermediate query (i.e. traversal path).
+	sql  *sql.Selector
+	path func(context.Context) (*sql.Selector, error)
+}
+
+// Where adds a new predicate for the MaintenanceEntryQuery builder.
+func (meq *MaintenanceEntryQuery) Where(ps ...predicate.MaintenanceEntry) *MaintenanceEntryQuery {
+	meq.predicates = append(meq.predicates, ps...)
+	return meq
+}
+
+// Limit adds a limit step to the query.
+func (meq *MaintenanceEntryQuery) Limit(limit int) *MaintenanceEntryQuery {
+	meq.limit = &limit
+	return meq
+}
+
+// Offset adds an offset step to the query.
+func (meq *MaintenanceEntryQuery) Offset(offset int) *MaintenanceEntryQuery {
+	meq.offset = &offset
+	return meq
+}
+
+// Unique configures the query builder to filter duplicate records on query.
+// By default, unique is set to true, and can be disabled using this method.
+func (meq *MaintenanceEntryQuery) Unique(unique bool) *MaintenanceEntryQuery {
+	meq.unique = &unique
+	return meq
+}
+
+// Order adds an order step to the query.
+func (meq *MaintenanceEntryQuery) Order(o ...OrderFunc) *MaintenanceEntryQuery {
+	meq.order = append(meq.order, o...)
+	return meq
+}
+
+// QueryItem chains the current query on the "item" edge.
+func (meq *MaintenanceEntryQuery) QueryItem() *ItemQuery {
+	query := &ItemQuery{config: meq.config}
+	query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+		if err := meq.prepareQuery(ctx); err != nil {
+			return nil, err
+		}
+		selector := meq.sqlQuery(ctx)
+		if err := selector.Err(); err != nil {
+			return nil, err
+		}
+		step := sqlgraph.NewStep(
+			sqlgraph.From(maintenanceentry.Table, maintenanceentry.FieldID, selector),
+			sqlgraph.To(item.Table, item.FieldID),
+			sqlgraph.Edge(sqlgraph.M2O, true, maintenanceentry.ItemTable, maintenanceentry.ItemColumn),
+		)
+		fromU = sqlgraph.SetNeighbors(meq.driver.Dialect(), step)
+		return fromU, nil
+	}
+	return query
+}
+
+// First returns the first MaintenanceEntry entity from the query.
+// Returns a *NotFoundError when no MaintenanceEntry was found.
+func (meq *MaintenanceEntryQuery) First(ctx context.Context) (*MaintenanceEntry, error) {
+	nodes, err := meq.Limit(1).All(ctx)
+	if err != nil {
+		return nil, err
+	}
+	if len(nodes) == 0 {
+		return nil, &NotFoundError{maintenanceentry.Label}
+	}
+	return nodes[0], nil
+}
+
+// FirstX is like First, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) FirstX(ctx context.Context) *MaintenanceEntry {
+	node, err := meq.First(ctx)
+	if err != nil && !IsNotFound(err) {
+		panic(err)
+	}
+	return node
+}
+
+// FirstID returns the first MaintenanceEntry ID from the query.
+// Returns a *NotFoundError when no MaintenanceEntry ID was found.
+func (meq *MaintenanceEntryQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
+	var ids []uuid.UUID
+	if ids, err = meq.Limit(1).IDs(ctx); err != nil {
+		return
+	}
+	if len(ids) == 0 {
+		err = &NotFoundError{maintenanceentry.Label}
+		return
+	}
+	return ids[0], nil
+}
+
+// FirstIDX is like FirstID, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) FirstIDX(ctx context.Context) uuid.UUID {
+	id, err := meq.FirstID(ctx)
+	if err != nil && !IsNotFound(err) {
+		panic(err)
+	}
+	return id
+}
+
+// Only returns a single MaintenanceEntry entity found by the query, ensuring it only returns one.
+// Returns a *NotSingularError when more than one MaintenanceEntry entity is found.
+// Returns a *NotFoundError when no MaintenanceEntry entities are found.
+func (meq *MaintenanceEntryQuery) Only(ctx context.Context) (*MaintenanceEntry, error) {
+	nodes, err := meq.Limit(2).All(ctx)
+	if err != nil {
+		return nil, err
+	}
+	switch len(nodes) {
+	case 1:
+		return nodes[0], nil
+	case 0:
+		return nil, &NotFoundError{maintenanceentry.Label}
+	default:
+		return nil, &NotSingularError{maintenanceentry.Label}
+	}
+}
+
+// OnlyX is like Only, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) OnlyX(ctx context.Context) *MaintenanceEntry {
+	node, err := meq.Only(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return node
+}
+
+// OnlyID is like Only, but returns the only MaintenanceEntry ID in the query.
+// Returns a *NotSingularError when more than one MaintenanceEntry ID is found.
+// Returns a *NotFoundError when no entities are found.
+func (meq *MaintenanceEntryQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
+	var ids []uuid.UUID
+	if ids, err = meq.Limit(2).IDs(ctx); err != nil {
+		return
+	}
+	switch len(ids) {
+	case 1:
+		id = ids[0]
+	case 0:
+		err = &NotFoundError{maintenanceentry.Label}
+	default:
+		err = &NotSingularError{maintenanceentry.Label}
+	}
+	return
+}
+
+// OnlyIDX is like OnlyID, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) OnlyIDX(ctx context.Context) uuid.UUID {
+	id, err := meq.OnlyID(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return id
+}
+
+// All executes the query and returns a list of MaintenanceEntries.
+func (meq *MaintenanceEntryQuery) All(ctx context.Context) ([]*MaintenanceEntry, error) {
+	if err := meq.prepareQuery(ctx); err != nil {
+		return nil, err
+	}
+	return meq.sqlAll(ctx)
+}
+
+// AllX is like All, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) AllX(ctx context.Context) []*MaintenanceEntry {
+	nodes, err := meq.All(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return nodes
+}
+
+// IDs executes the query and returns a list of MaintenanceEntry IDs.
+func (meq *MaintenanceEntryQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
+	var ids []uuid.UUID
+	if err := meq.Select(maintenanceentry.FieldID).Scan(ctx, &ids); err != nil {
+		return nil, err
+	}
+	return ids, nil
+}
+
+// IDsX is like IDs, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) IDsX(ctx context.Context) []uuid.UUID {
+	ids, err := meq.IDs(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return ids
+}
+
+// Count returns the count of the given query.
+func (meq *MaintenanceEntryQuery) Count(ctx context.Context) (int, error) {
+	if err := meq.prepareQuery(ctx); err != nil {
+		return 0, err
+	}
+	return meq.sqlCount(ctx)
+}
+
+// CountX is like Count, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) CountX(ctx context.Context) int {
+	count, err := meq.Count(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return count
+}
+
+// Exist returns true if the query has elements in the graph.
+func (meq *MaintenanceEntryQuery) Exist(ctx context.Context) (bool, error) {
+	if err := meq.prepareQuery(ctx); err != nil {
+		return false, err
+	}
+	return meq.sqlExist(ctx)
+}
+
+// ExistX is like Exist, but panics if an error occurs.
+func (meq *MaintenanceEntryQuery) ExistX(ctx context.Context) bool {
+	exist, err := meq.Exist(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return exist
+}
+
+// Clone returns a duplicate of the MaintenanceEntryQuery builder, including all associated steps. It can be
+// used to prepare common query builders and use them differently after the clone is made.
+func (meq *MaintenanceEntryQuery) Clone() *MaintenanceEntryQuery {
+	if meq == nil {
+		return nil
+	}
+	return &MaintenanceEntryQuery{
+		config:     meq.config,
+		limit:      meq.limit,
+		offset:     meq.offset,
+		order:      append([]OrderFunc{}, meq.order...),
+		predicates: append([]predicate.MaintenanceEntry{}, meq.predicates...),
+		withItem:   meq.withItem.Clone(),
+		// clone intermediate query.
+		sql:    meq.sql.Clone(),
+		path:   meq.path,
+		unique: meq.unique,
+	}
+}
+
+// WithItem tells the query-builder to eager-load the nodes that are connected to
+// the "item" edge. The optional arguments are used to configure the query builder of the edge.
+func (meq *MaintenanceEntryQuery) WithItem(opts ...func(*ItemQuery)) *MaintenanceEntryQuery {
+	query := &ItemQuery{config: meq.config}
+	for _, opt := range opts {
+		opt(query)
+	}
+	meq.withItem = query
+	return meq
+}
+
+// GroupBy is used to group vertices by one or more fields/columns.
+// It is often used with aggregate functions, like: count, max, mean, min, sum.
+//
+// Example:
+//
+//	var v []struct {
+//		CreatedAt time.Time `json:"created_at,omitempty"`
+//		Count int `json:"count,omitempty"`
+//	}
+//
+//	client.MaintenanceEntry.Query().
+//		GroupBy(maintenanceentry.FieldCreatedAt).
+//		Aggregate(ent.Count()).
+//		Scan(ctx, &v)
+func (meq *MaintenanceEntryQuery) GroupBy(field string, fields ...string) *MaintenanceEntryGroupBy {
+	grbuild := &MaintenanceEntryGroupBy{config: meq.config}
+	grbuild.fields = append([]string{field}, fields...)
+	grbuild.path = func(ctx context.Context) (prev *sql.Selector, err error) {
+		if err := meq.prepareQuery(ctx); err != nil {
+			return nil, err
+		}
+		return meq.sqlQuery(ctx), nil
+	}
+	grbuild.label = maintenanceentry.Label
+	grbuild.flds, grbuild.scan = &grbuild.fields, grbuild.Scan
+	return grbuild
+}
+
+// Select allows the selection of one or more fields/columns for the given query,
+// instead of selecting all fields in the entity.
+//
+// Example:
+//
+//	var v []struct {
+//		CreatedAt time.Time `json:"created_at,omitempty"`
+//	}
+//
+//	client.MaintenanceEntry.Query().
+//		Select(maintenanceentry.FieldCreatedAt).
+//		Scan(ctx, &v)
+func (meq *MaintenanceEntryQuery) Select(fields ...string) *MaintenanceEntrySelect {
+	meq.fields = append(meq.fields, fields...)
+	selbuild := &MaintenanceEntrySelect{MaintenanceEntryQuery: meq}
+	selbuild.label = maintenanceentry.Label
+	selbuild.flds, selbuild.scan = &meq.fields, selbuild.Scan
+	return selbuild
+}
+
+// Aggregate returns a MaintenanceEntrySelect configured with the given aggregations.
+func (meq *MaintenanceEntryQuery) Aggregate(fns ...AggregateFunc) *MaintenanceEntrySelect {
+	return meq.Select().Aggregate(fns...)
+}
+
+func (meq *MaintenanceEntryQuery) prepareQuery(ctx context.Context) error {
+	for _, f := range meq.fields {
+		if !maintenanceentry.ValidColumn(f) {
+			return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
+		}
+	}
+	if meq.path != nil {
+		prev, err := meq.path(ctx)
+		if err != nil {
+			return err
+		}
+		meq.sql = prev
+	}
+	return nil
+}
+
+func (meq *MaintenanceEntryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*MaintenanceEntry, error) {
+	var (
+		nodes       = []*MaintenanceEntry{}
+		_spec       = meq.querySpec()
+		loadedTypes = [1]bool{
+			meq.withItem != nil,
+		}
+	)
+	_spec.ScanValues = func(columns []string) ([]any, error) {
+		return (*MaintenanceEntry).scanValues(nil, columns)
+	}
+	_spec.Assign = func(columns []string, values []any) error {
+		node := &MaintenanceEntry{config: meq.config}
+		nodes = append(nodes, node)
+		node.Edges.loadedTypes = loadedTypes
+		return node.assignValues(columns, values)
+	}
+	for i := range hooks {
+		hooks[i](ctx, _spec)
+	}
+	if err := sqlgraph.QueryNodes(ctx, meq.driver, _spec); err != nil {
+		return nil, err
+	}
+	if len(nodes) == 0 {
+		return nodes, nil
+	}
+	if query := meq.withItem; query != nil {
+		if err := meq.loadItem(ctx, query, nodes, nil,
+			func(n *MaintenanceEntry, e *Item) { n.Edges.Item = e }); err != nil {
+			return nil, err
+		}
+	}
+	return nodes, nil
+}
+
+func (meq *MaintenanceEntryQuery) loadItem(ctx context.Context, query *ItemQuery, nodes []*MaintenanceEntry, init func(*MaintenanceEntry), assign func(*MaintenanceEntry, *Item)) error {
+	ids := make([]uuid.UUID, 0, len(nodes))
+	nodeids := make(map[uuid.UUID][]*MaintenanceEntry)
+	for i := range nodes {
+		fk := nodes[i].ItemID
+		if _, ok := nodeids[fk]; !ok {
+			ids = append(ids, fk)
+		}
+		nodeids[fk] = append(nodeids[fk], nodes[i])
+	}
+	query.Where(item.IDIn(ids...))
+	neighbors, err := query.All(ctx)
+	if err != nil {
+		return err
+	}
+	for _, n := range neighbors {
+		nodes, ok := nodeids[n.ID]
+		if !ok {
+			return fmt.Errorf(`unexpected foreign-key "item_id" returned %v`, n.ID)
+		}
+		for i := range nodes {
+			assign(nodes[i], n)
+		}
+	}
+	return nil
+}
+
+func (meq *MaintenanceEntryQuery) sqlCount(ctx context.Context) (int, error) {
+	_spec := meq.querySpec()
+	_spec.Node.Columns = meq.fields
+	if len(meq.fields) > 0 {
+		_spec.Unique = meq.unique != nil && *meq.unique
+	}
+	return sqlgraph.CountNodes(ctx, meq.driver, _spec)
+}
+
+func (meq *MaintenanceEntryQuery) sqlExist(ctx context.Context) (bool, error) {
+	switch _, err := meq.FirstID(ctx); {
+	case IsNotFound(err):
+		return false, nil
+	case err != nil:
+		return false, fmt.Errorf("ent: check existence: %w", err)
+	default:
+		return true, nil
+	}
+}
+
+func (meq *MaintenanceEntryQuery) querySpec() *sqlgraph.QuerySpec {
+	_spec := &sqlgraph.QuerySpec{
+		Node: &sqlgraph.NodeSpec{
+			Table:   maintenanceentry.Table,
+			Columns: maintenanceentry.Columns,
+			ID: &sqlgraph.FieldSpec{
+				Type:   field.TypeUUID,
+				Column: maintenanceentry.FieldID,
+			},
+		},
+		From:   meq.sql,
+		Unique: true,
+	}
+	if unique := meq.unique; unique != nil {
+		_spec.Unique = *unique
+	}
+	if fields := meq.fields; len(fields) > 0 {
+		_spec.Node.Columns = make([]string, 0, len(fields))
+		_spec.Node.Columns = append(_spec.Node.Columns, maintenanceentry.FieldID)
+		for i := range fields {
+			if fields[i] != maintenanceentry.FieldID {
+				_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
+			}
+		}
+	}
+	if ps := meq.predicates; len(ps) > 0 {
+		_spec.Predicate = func(selector *sql.Selector) {
+			for i := range ps {
+				ps[i](selector)
+			}
+		}
+	}
+	if limit := meq.limit; limit != nil {
+		_spec.Limit = *limit
+	}
+	if offset := meq.offset; offset != nil {
+		_spec.Offset = *offset
+	}
+	if ps := meq.order; len(ps) > 0 {
+		_spec.Order = func(selector *sql.Selector) {
+			for i := range ps {
+				ps[i](selector)
+			}
+		}
+	}
+	return _spec
+}
+
+func (meq *MaintenanceEntryQuery) sqlQuery(ctx context.Context) *sql.Selector {
+	builder := sql.Dialect(meq.driver.Dialect())
+	t1 := builder.Table(maintenanceentry.Table)
+	columns := meq.fields
+	if len(columns) == 0 {
+		columns = maintenanceentry.Columns
+	}
+	selector := builder.Select(t1.Columns(columns...)...).From(t1)
+	if meq.sql != nil {
+		selector = meq.sql
+		selector.Select(selector.Columns(columns...)...)
+	}
+	if meq.unique != nil && *meq.unique {
+		selector.Distinct()
+	}
+	for _, p := range meq.predicates {
+		p(selector)
+	}
+	for _, p := range meq.order {
+		p(selector)
+	}
+	if offset := meq.offset; offset != nil {
+		// limit is mandatory for the offset clause. We start
+		// with a default value and override it below if needed.
+		selector.Offset(*offset).Limit(math.MaxInt32)
+	}
+	if limit := meq.limit; limit != nil {
+		selector.Limit(*limit)
+	}
+	return selector
+}
+
+// MaintenanceEntryGroupBy is the group-by builder for MaintenanceEntry entities.
+type MaintenanceEntryGroupBy struct {
+	config
+	selector
+	fields []string
+	fns    []AggregateFunc
+	// intermediate query (i.e. traversal path).
+	sql  *sql.Selector
+	path func(context.Context) (*sql.Selector, error)
+}
+
+// Aggregate adds the given aggregation functions to the group-by query.
+func (megb *MaintenanceEntryGroupBy) Aggregate(fns ...AggregateFunc) *MaintenanceEntryGroupBy {
+	megb.fns = append(megb.fns, fns...)
+	return megb
+}
+
+// Scan applies the group-by query and scans the result into the given value.
+func (megb *MaintenanceEntryGroupBy) Scan(ctx context.Context, v any) error {
+	query, err := megb.path(ctx)
+	if err != nil {
+		return err
+	}
+	megb.sql = query
+	return megb.sqlScan(ctx, v)
+}
+
+func (megb *MaintenanceEntryGroupBy) sqlScan(ctx context.Context, v any) error {
+	for _, f := range megb.fields {
+		if !maintenanceentry.ValidColumn(f) {
+			return &ValidationError{Name: f, err: fmt.Errorf("invalid field %q for group-by", f)}
+		}
+	}
+	selector := megb.sqlQuery()
+	if err := selector.Err(); err != nil {
+		return err
+	}
+	rows := &sql.Rows{}
+	query, args := selector.Query()
+	if err := megb.driver.Query(ctx, query, args, rows); err != nil {
+		return err
+	}
+	defer rows.Close()
+	return sql.ScanSlice(rows, v)
+}
+
+func (megb *MaintenanceEntryGroupBy) sqlQuery() *sql.Selector {
+	selector := megb.sql.Select()
+	aggregation := make([]string, 0, len(megb.fns))
+	for _, fn := range megb.fns {
+		aggregation = append(aggregation, fn(selector))
+	}
+	if len(selector.SelectedColumns()) == 0 {
+		columns := make([]string, 0, len(megb.fields)+len(megb.fns))
+		for _, f := range megb.fields {
+			columns = append(columns, selector.C(f))
+		}
+		columns = append(columns, aggregation...)
+		selector.Select(columns...)
+	}
+	return selector.GroupBy(selector.Columns(megb.fields...)...)
+}
+
+// MaintenanceEntrySelect is the builder for selecting fields of MaintenanceEntry entities.
+type MaintenanceEntrySelect struct {
+	*MaintenanceEntryQuery
+	selector
+	// intermediate query (i.e. traversal path).
+	sql *sql.Selector
+}
+
+// Aggregate adds the given aggregation functions to the selector query.
+func (mes *MaintenanceEntrySelect) Aggregate(fns ...AggregateFunc) *MaintenanceEntrySelect {
+	mes.fns = append(mes.fns, fns...)
+	return mes
+}
+
+// Scan applies the selector query and scans the result into the given value.
+func (mes *MaintenanceEntrySelect) Scan(ctx context.Context, v any) error {
+	if err := mes.prepareQuery(ctx); err != nil {
+		return err
+	}
+	mes.sql = mes.MaintenanceEntryQuery.sqlQuery(ctx)
+	return mes.sqlScan(ctx, v)
+}
+
+func (mes *MaintenanceEntrySelect) sqlScan(ctx context.Context, v any) error {
+	aggregation := make([]string, 0, len(mes.fns))
+	for _, fn := range mes.fns {
+		aggregation = append(aggregation, fn(mes.sql))
+	}
+	switch n := len(*mes.selector.flds); {
+	case n == 0 && len(aggregation) > 0:
+		mes.sql.Select(aggregation...)
+	case n != 0 && len(aggregation) > 0:
+		mes.sql.AppendSelect(aggregation...)
+	}
+	rows := &sql.Rows{}
+	query, args := mes.sql.Query()
+	if err := mes.driver.Query(ctx, query, args, rows); err != nil {
+		return err
+	}
+	defer rows.Close()
+	return sql.ScanSlice(rows, v)
+}
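
The query builder chains predicates, eager loading, ordering, and limits; a sketch assuming the generated maintenanceentry.ItemID predicate and a connected client, with the limit of ten chosen arbitrarily:

    // recentEntries returns an item's ten most recent maintenance entries,
    // eager-loading the parent Item on each row via WithItem.
    func recentEntries(ctx context.Context, client *ent.Client, itemID uuid.UUID) ([]*ent.MaintenanceEntry, error) {
        return client.MaintenanceEntry.Query().
            Where(maintenanceentry.ItemID(itemID)).
            WithItem().
            Order(ent.Desc(maintenanceentry.FieldDate)).
            Limit(10).
            All(ctx)
    }

For aggregates, the GroupBy/Aggregate combination documented above (for example Aggregate(ent.Count())) scans its results into a caller-supplied slice of structs.
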
diff --git a/backend/internal/data/ent/maintenanceentry_update.go b/backend/internal/data/ent/maintenanceentry_update.go
new file mode 100644
index 0000000..af0aafd
--- /dev/null
+++ b/backend/internal/data/ent/maintenanceentry_update.go
@@ -0,0 +1,594 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"time"
+
+	"entgo.io/ent/dialect/sql"
+	"entgo.io/ent/dialect/sql/sqlgraph"
+	"entgo.io/ent/schema/field"
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
+)
+
+// MaintenanceEntryUpdate is the builder for updating MaintenanceEntry entities.
+type MaintenanceEntryUpdate struct {
+	config
+	hooks    []Hook
+	mutation *MaintenanceEntryMutation
+}
+
+// Where appends a list of predicates to the MaintenanceEntryUpdate builder.
+func (meu *MaintenanceEntryUpdate) Where(ps ...predicate.MaintenanceEntry) *MaintenanceEntryUpdate {
+	meu.mutation.Where(ps...)
+	return meu
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (meu *MaintenanceEntryUpdate) SetUpdatedAt(t time.Time) *MaintenanceEntryUpdate {
+	meu.mutation.SetUpdatedAt(t)
+	return meu
+}
+
+// SetItemID sets the "item_id" field.
+func (meu *MaintenanceEntryUpdate) SetItemID(u uuid.UUID) *MaintenanceEntryUpdate {
+	meu.mutation.SetItemID(u)
+	return meu
+}
+
+// SetDate sets the "date" field.
+func (meu *MaintenanceEntryUpdate) SetDate(t time.Time) *MaintenanceEntryUpdate {
+	meu.mutation.SetDate(t)
+	return meu
+}
+
+// SetNillableDate sets the "date" field if the given value is not nil.
+func (meu *MaintenanceEntryUpdate) SetNillableDate(t *time.Time) *MaintenanceEntryUpdate {
+	if t != nil {
+		meu.SetDate(*t)
+	}
+	return meu
+}
+
+// SetName sets the "name" field.
+func (meu *MaintenanceEntryUpdate) SetName(s string) *MaintenanceEntryUpdate {
+	meu.mutation.SetName(s)
+	return meu
+}
+
+// SetDescription sets the "description" field.
+func (meu *MaintenanceEntryUpdate) SetDescription(s string) *MaintenanceEntryUpdate {
+	meu.mutation.SetDescription(s)
+	return meu
+}
+
+// SetNillableDescription sets the "description" field if the given value is not nil.
+func (meu *MaintenanceEntryUpdate) SetNillableDescription(s *string) *MaintenanceEntryUpdate {
+	if s != nil {
+		meu.SetDescription(*s)
+	}
+	return meu
+}
+
+// ClearDescription clears the value of the "description" field.
+func (meu *MaintenanceEntryUpdate) ClearDescription() *MaintenanceEntryUpdate {
+	meu.mutation.ClearDescription()
+	return meu
+}
+
+// SetCost sets the "cost" field.
+func (meu *MaintenanceEntryUpdate) SetCost(f float64) *MaintenanceEntryUpdate {
+	meu.mutation.ResetCost()
+	meu.mutation.SetCost(f)
+	return meu
+}
+
+// SetNillableCost sets the "cost" field if the given value is not nil.
+func (meu *MaintenanceEntryUpdate) SetNillableCost(f *float64) *MaintenanceEntryUpdate {
+	if f != nil {
+		meu.SetCost(*f)
+	}
+	return meu
+}
+
+// AddCost adds f to the "cost" field.
+func (meu *MaintenanceEntryUpdate) AddCost(f float64) *MaintenanceEntryUpdate {
+	meu.mutation.AddCost(f)
+	return meu
+}
+
+// SetItem sets the "item" edge to the Item entity.
+func (meu *MaintenanceEntryUpdate) SetItem(i *Item) *MaintenanceEntryUpdate {
+	return meu.SetItemID(i.ID)
+}
+
+// Mutation returns the MaintenanceEntryMutation object of the builder.
+func (meu *MaintenanceEntryUpdate) Mutation() *MaintenanceEntryMutation {
+	return meu.mutation
+}
+
+// ClearItem clears the "item" edge to the Item entity.
+func (meu *MaintenanceEntryUpdate) ClearItem() *MaintenanceEntryUpdate {
+	meu.mutation.ClearItem()
+	return meu
+}
+
+// Save executes the query and returns the number of nodes affected by the update operation.
+func (meu *MaintenanceEntryUpdate) Save(ctx context.Context) (int, error) {
+	var (
+		err      error
+		affected int
+	)
+	meu.defaults()
+	if len(meu.hooks) == 0 {
+		if err = meu.check(); err != nil {
+			return 0, err
+		}
+		affected, err = meu.sqlSave(ctx)
+	} else {
+		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+			mutation, ok := m.(*MaintenanceEntryMutation)
+			if !ok {
+				return nil, fmt.Errorf("unexpected mutation type %T", m)
+			}
+			if err = meu.check(); err != nil {
+				return 0, err
+			}
+			meu.mutation = mutation
+			affected, err = meu.sqlSave(ctx)
+			mutation.done = true
+			return affected, err
+		})
+		for i := len(meu.hooks) - 1; i >= 0; i-- {
+			if meu.hooks[i] == nil {
+				return 0, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
+			}
+			mut = meu.hooks[i](mut)
+		}
+		if _, err := mut.Mutate(ctx, meu.mutation); err != nil {
+			return 0, err
+		}
+	}
+	return affected, err
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (meu *MaintenanceEntryUpdate) SaveX(ctx context.Context) int {
+	affected, err := meu.Save(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return affected
+}
+
+// Exec executes the query.
+func (meu *MaintenanceEntryUpdate) Exec(ctx context.Context) error {
+	_, err := meu.Save(ctx)
+	return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (meu *MaintenanceEntryUpdate) ExecX(ctx context.Context) {
+	if err := meu.Exec(ctx); err != nil {
+		panic(err)
+	}
+}
+
+// defaults sets the default values of the builder before save.
+func (meu *MaintenanceEntryUpdate) defaults() {
+	if _, ok := meu.mutation.UpdatedAt(); !ok {
+		v := maintenanceentry.UpdateDefaultUpdatedAt()
+		meu.mutation.SetUpdatedAt(v)
+	}
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (meu *MaintenanceEntryUpdate) check() error {
+	if v, ok := meu.mutation.Name(); ok {
+		if err := maintenanceentry.NameValidator(v); err != nil {
+			return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "MaintenanceEntry.name": %w`, err)}
+		}
+	}
+	if v, ok := meu.mutation.Description(); ok {
+		if err := maintenanceentry.DescriptionValidator(v); err != nil {
+			return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "MaintenanceEntry.description": %w`, err)}
+		}
+	}
+	if _, ok := meu.mutation.ItemID(); meu.mutation.ItemCleared() && !ok {
+		return errors.New(`ent: clearing a required unique edge "MaintenanceEntry.item"`)
+	}
+	return nil
+}
+
+func (meu *MaintenanceEntryUpdate) sqlSave(ctx context.Context) (n int, err error) {
+	_spec := &sqlgraph.UpdateSpec{
+		Node: &sqlgraph.NodeSpec{
+			Table:   maintenanceentry.Table,
+			Columns: maintenanceentry.Columns,
+			ID: &sqlgraph.FieldSpec{
+				Type:   field.TypeUUID,
+				Column: maintenanceentry.FieldID,
+			},
+		},
+	}
+	if ps := meu.mutation.predicates; len(ps) > 0 {
+		_spec.Predicate = func(selector *sql.Selector) {
+			for i := range ps {
+				ps[i](selector)
+			}
+		}
+	}
+	if value, ok := meu.mutation.UpdatedAt(); ok {
+		_spec.SetField(maintenanceentry.FieldUpdatedAt, field.TypeTime, value)
+	}
+	if value, ok := meu.mutation.Date(); ok {
+		_spec.SetField(maintenanceentry.FieldDate, field.TypeTime, value)
+	}
+	if value, ok := meu.mutation.Name(); ok {
+		_spec.SetField(maintenanceentry.FieldName, field.TypeString, value)
+	}
+	if value, ok := meu.mutation.Description(); ok {
+		_spec.SetField(maintenanceentry.FieldDescription, field.TypeString, value)
+	}
+	if meu.mutation.DescriptionCleared() {
+		_spec.ClearField(maintenanceentry.FieldDescription, field.TypeString)
+	}
+	if value, ok := meu.mutation.Cost(); ok {
+		_spec.SetField(maintenanceentry.FieldCost, field.TypeFloat64, value)
+	}
+	if value, ok := meu.mutation.AddedCost(); ok {
+		_spec.AddField(maintenanceentry.FieldCost, field.TypeFloat64, value)
+	}
+	if meu.mutation.ItemCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.M2O,
+			Inverse: true,
+			Table:   maintenanceentry.ItemTable,
+			Columns: []string{maintenanceentry.ItemColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: item.FieldID,
+				},
+			},
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := meu.mutation.ItemIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.M2O,
+			Inverse: true,
+			Table:   maintenanceentry.ItemTable,
+			Columns: []string{maintenanceentry.ItemColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: item.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Add = append(_spec.Edges.Add, edge)
+	}
+	if n, err = sqlgraph.UpdateNodes(ctx, meu.driver, _spec); err != nil {
+		if _, ok := err.(*sqlgraph.NotFoundError); ok {
+			err = &NotFoundError{maintenanceentry.Label}
+		} else if sqlgraph.IsConstraintError(err) {
+			err = &ConstraintError{msg: err.Error(), wrap: err}
+		}
+		return 0, err
+	}
+	return n, nil
+}
+
+// MaintenanceEntryUpdateOne is the builder for updating a single MaintenanceEntry entity.
+type MaintenanceEntryUpdateOne struct {
+	config
+	fields   []string
+	hooks    []Hook
+	mutation *MaintenanceEntryMutation
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (meuo *MaintenanceEntryUpdateOne) SetUpdatedAt(t time.Time) *MaintenanceEntryUpdateOne {
+	meuo.mutation.SetUpdatedAt(t)
+	return meuo
+}
+
+// SetItemID sets the "item_id" field.
+func (meuo *MaintenanceEntryUpdateOne) SetItemID(u uuid.UUID) *MaintenanceEntryUpdateOne {
+	meuo.mutation.SetItemID(u)
+	return meuo
+}
+
+// SetDate sets the "date" field.
+func (meuo *MaintenanceEntryUpdateOne) SetDate(t time.Time) *MaintenanceEntryUpdateOne {
+	meuo.mutation.SetDate(t)
+	return meuo
+}
+
+// SetNillableDate sets the "date" field if the given value is not nil.
+func (meuo *MaintenanceEntryUpdateOne) SetNillableDate(t *time.Time) *MaintenanceEntryUpdateOne {
+	if t != nil {
+		meuo.SetDate(*t)
+	}
+	return meuo
+}
+
+// SetName sets the "name" field.
+func (meuo *MaintenanceEntryUpdateOne) SetName(s string) *MaintenanceEntryUpdateOne {
+	meuo.mutation.SetName(s)
+	return meuo
+}
+
+// SetDescription sets the "description" field.
+func (meuo *MaintenanceEntryUpdateOne) SetDescription(s string) *MaintenanceEntryUpdateOne {
+	meuo.mutation.SetDescription(s)
+	return meuo
+}
+
+// SetNillableDescription sets the "description" field if the given value is not nil.
+func (meuo *MaintenanceEntryUpdateOne) SetNillableDescription(s *string) *MaintenanceEntryUpdateOne {
+	if s != nil {
+		meuo.SetDescription(*s)
+	}
+	return meuo
+}
+
+// ClearDescription clears the value of the "description" field.
+func (meuo *MaintenanceEntryUpdateOne) ClearDescription() *MaintenanceEntryUpdateOne {
+	meuo.mutation.ClearDescription()
+	return meuo
+}
+
+// SetCost sets the "cost" field.
+func (meuo *MaintenanceEntryUpdateOne) SetCost(f float64) *MaintenanceEntryUpdateOne {
+	meuo.mutation.ResetCost()
+	meuo.mutation.SetCost(f)
+	return meuo
+}
+
+// SetNillableCost sets the "cost" field if the given value is not nil.
+func (meuo *MaintenanceEntryUpdateOne) SetNillableCost(f *float64) *MaintenanceEntryUpdateOne {
+	if f != nil {
+		meuo.SetCost(*f)
+	}
+	return meuo
+}
+
+// AddCost adds f to the "cost" field.
+func (meuo *MaintenanceEntryUpdateOne) AddCost(f float64) *MaintenanceEntryUpdateOne {
+	meuo.mutation.AddCost(f)
+	return meuo
+}
+
+// SetItem sets the "item" edge to the Item entity.
+func (meuo *MaintenanceEntryUpdateOne) SetItem(i *Item) *MaintenanceEntryUpdateOne {
+	return meuo.SetItemID(i.ID)
+}
+
+// Mutation returns the MaintenanceEntryMutation object of the builder.
+func (meuo *MaintenanceEntryUpdateOne) Mutation() *MaintenanceEntryMutation {
+	return meuo.mutation
+}
+
+// ClearItem clears the "item" edge to the Item entity.
+func (meuo *MaintenanceEntryUpdateOne) ClearItem() *MaintenanceEntryUpdateOne {
+	meuo.mutation.ClearItem()
+	return meuo
+}
+
+// Select allows selecting one or more fields (columns) of the returned entity.
+// The default is selecting all fields defined in the entity schema.
+func (meuo *MaintenanceEntryUpdateOne) Select(field string, fields ...string) *MaintenanceEntryUpdateOne {
+	meuo.fields = append([]string{field}, fields...)
+	return meuo
+}
+
+// Save executes the query and returns the updated MaintenanceEntry entity.
+func (meuo *MaintenanceEntryUpdateOne) Save(ctx context.Context) (*MaintenanceEntry, error) {
+	var (
+		err  error
+		node *MaintenanceEntry
+	)
+	meuo.defaults()
+	if len(meuo.hooks) == 0 {
+		if err = meuo.check(); err != nil {
+			return nil, err
+		}
+		node, err = meuo.sqlSave(ctx)
+	} else {
+		var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+			mutation, ok := m.(*MaintenanceEntryMutation)
+			if !ok {
+				return nil, fmt.Errorf("unexpected mutation type %T", m)
+			}
+			if err = meuo.check(); err != nil {
+				return nil, err
+			}
+			meuo.mutation = mutation
+			node, err = meuo.sqlSave(ctx)
+			mutation.done = true
+			return node, err
+		})
+		for i := len(meuo.hooks) - 1; i >= 0; i-- {
+			if meuo.hooks[i] == nil {
+				return nil, fmt.Errorf("ent: uninitialized hook (forgotten import ent/runtime?)")
+			}
+			mut = meuo.hooks[i](mut)
+		}
+		v, err := mut.Mutate(ctx, meuo.mutation)
+		if err != nil {
+			return nil, err
+		}
+		nv, ok := v.(*MaintenanceEntry)
+		if !ok {
+			return nil, fmt.Errorf("unexpected node type %T returned from MaintenanceEntryMutation", v)
+		}
+		node = nv
+	}
+	return node, err
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (meuo *MaintenanceEntryUpdateOne) SaveX(ctx context.Context) *MaintenanceEntry {
+	node, err := meuo.Save(ctx)
+	if err != nil {
+		panic(err)
+	}
+	return node
+}
+
+// Exec executes the query on the entity.
+func (meuo *MaintenanceEntryUpdateOne) Exec(ctx context.Context) error {
+	_, err := meuo.Save(ctx)
+	return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (meuo *MaintenanceEntryUpdateOne) ExecX(ctx context.Context) {
+	if err := meuo.Exec(ctx); err != nil {
+		panic(err)
+	}
+}
+
+// defaults sets the default values of the builder before save.
+func (meuo *MaintenanceEntryUpdateOne) defaults() {
+	if _, ok := meuo.mutation.UpdatedAt(); !ok {
+		v := maintenanceentry.UpdateDefaultUpdatedAt()
+		meuo.mutation.SetUpdatedAt(v)
+	}
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (meuo *MaintenanceEntryUpdateOne) check() error {
+	if v, ok := meuo.mutation.Name(); ok {
+		if err := maintenanceentry.NameValidator(v); err != nil {
+			return &ValidationError{Name: "name", err: fmt.Errorf(`ent: validator failed for field "MaintenanceEntry.name": %w`, err)}
+		}
+	}
+	if v, ok := meuo.mutation.Description(); ok {
+		if err := maintenanceentry.DescriptionValidator(v); err != nil {
+			return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "MaintenanceEntry.description": %w`, err)}
+		}
+	}
+	if _, ok := meuo.mutation.ItemID(); meuo.mutation.ItemCleared() && !ok {
+		return errors.New(`ent: clearing a required unique edge "MaintenanceEntry.item"`)
+	}
+	return nil
+}
+
+func (meuo *MaintenanceEntryUpdateOne) sqlSave(ctx context.Context) (_node *MaintenanceEntry, err error) {
+	_spec := &sqlgraph.UpdateSpec{
+		Node: &sqlgraph.NodeSpec{
+			Table:   maintenanceentry.Table,
+			Columns: maintenanceentry.Columns,
+			ID: &sqlgraph.FieldSpec{
+				Type:   field.TypeUUID,
+				Column: maintenanceentry.FieldID,
+			},
+		},
+	}
+	id, ok := meuo.mutation.ID()
+	if !ok {
+		return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "MaintenanceEntry.id" for update`)}
+	}
+	_spec.Node.ID.Value = id
+	if fields := meuo.fields; len(fields) > 0 {
+		_spec.Node.Columns = make([]string, 0, len(fields))
+		_spec.Node.Columns = append(_spec.Node.Columns, maintenanceentry.FieldID)
+		for _, f := range fields {
+			if !maintenanceentry.ValidColumn(f) {
+				return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
+			}
+			if f != maintenanceentry.FieldID {
+				_spec.Node.Columns = append(_spec.Node.Columns, f)
+			}
+		}
+	}
+	if ps := meuo.mutation.predicates; len(ps) > 0 {
+		_spec.Predicate = func(selector *sql.Selector) {
+			for i := range ps {
+				ps[i](selector)
+			}
+		}
+	}
+	if value, ok := meuo.mutation.UpdatedAt(); ok {
+		_spec.SetField(maintenanceentry.FieldUpdatedAt, field.TypeTime, value)
+	}
+	if value, ok := meuo.mutation.Date(); ok {
+		_spec.SetField(maintenanceentry.FieldDate, field.TypeTime, value)
+	}
+	if value, ok := meuo.mutation.Name(); ok {
+		_spec.SetField(maintenanceentry.FieldName, field.TypeString, value)
+	}
+	if value, ok := meuo.mutation.Description(); ok {
+		_spec.SetField(maintenanceentry.FieldDescription, field.TypeString, value)
+	}
+	if meuo.mutation.DescriptionCleared() {
+		_spec.ClearField(maintenanceentry.FieldDescription, field.TypeString)
+	}
+	if value, ok := meuo.mutation.Cost(); ok {
+		_spec.SetField(maintenanceentry.FieldCost, field.TypeFloat64, value)
+	}
+	if value, ok := meuo.mutation.AddedCost(); ok {
+		_spec.AddField(maintenanceentry.FieldCost, field.TypeFloat64, value)
+	}
+	if meuo.mutation.ItemCleared() {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.M2O,
+			Inverse: true,
+			Table:   maintenanceentry.ItemTable,
+			Columns: []string{maintenanceentry.ItemColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: item.FieldID,
+				},
+			},
+		}
+		_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+	}
+	if nodes := meuo.mutation.ItemIDs(); len(nodes) > 0 {
+		edge := &sqlgraph.EdgeSpec{
+			Rel:     sqlgraph.M2O,
+			Inverse: true,
+			Table:   maintenanceentry.ItemTable,
+			Columns: []string{maintenanceentry.ItemColumn},
+			Bidi:    false,
+			Target: &sqlgraph.EdgeTarget{
+				IDSpec: &sqlgraph.FieldSpec{
+					Type:   field.TypeUUID,
+					Column: item.FieldID,
+				},
+			},
+		}
+		for _, k := range nodes {
+			edge.Target.Nodes = append(edge.Target.Nodes, k)
+		}
+		_spec.Edges.Add = append(_spec.Edges.Add, edge)
+	}
+	_node = &MaintenanceEntry{config: meuo.config}
+	_spec.Assign = _node.assignValues
+	_spec.ScanValues = _node.scanValues
+	if err = sqlgraph.UpdateNode(ctx, meuo.driver, _spec); err != nil {
+		if _, ok := err.(*sqlgraph.NotFoundError); ok {
+			err = &NotFoundError{maintenanceentry.Label}
+		} else if sqlgraph.IsConstraintError(err) {
+			err = &ConstraintError{msg: err.Error(), wrap: err}
+		}
+		return nil, err
+	}
+	return _node, nil
+}
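
A sketch of the single-entity update path, assuming the client exposes the usual generated UpdateOneID constructor; the field values are placeholders:

    // reviseEntry corrects the name, cost, and description of one maintenance
    // entry and returns the updated row.
    func reviseEntry(ctx context.Context, client *ent.Client, id uuid.UUID) (*ent.MaintenanceEntry, error) {
        return client.MaintenanceEntry.UpdateOneID(id).
            SetName("Replace cabin air filter").
            SetCost(24.50).
            SetDescription("OEM filter installed; next service due in 12 months").
            Save(ctx)
    }

Because item is a required edge, check() rejects a mutation that clears it without setting a new item_id in the same update.
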
diff --git a/backend/internal/data/ent/migrate/schema.go b/backend/internal/data/ent/migrate/schema.go
index 10beaa4..c700d34 100644
--- a/backend/internal/data/ent/migrate/schema.go
+++ b/backend/internal/data/ent/migrate/schema.go
@@ -53,7 +53,7 @@ var (
 				Symbol:     "auth_roles_auth_tokens_roles",
 				Columns:    []*schema.Column{AuthRolesColumns[2]},
 				RefColumns: []*schema.Column{AuthTokensColumns[0]},
-				OnDelete:   schema.SetNull,
+				OnDelete:   schema.Cascade,
 			},
 		},
 	}
@@ -110,37 +110,6 @@ var (
 			},
 		},
 	}
-	// DocumentTokensColumns holds the columns for the "document_tokens" table.
-	DocumentTokensColumns = []*schema.Column{
-		{Name: "id", Type: field.TypeUUID},
-		{Name: "created_at", Type: field.TypeTime},
-		{Name: "updated_at", Type: field.TypeTime},
-		{Name: "token", Type: field.TypeBytes, Unique: true},
-		{Name: "uses", Type: field.TypeInt, Default: 1},
-		{Name: "expires_at", Type: field.TypeTime},
-		{Name: "document_document_tokens", Type: field.TypeUUID, Nullable: true},
-	}
-	// DocumentTokensTable holds the schema information for the "document_tokens" table.
-	DocumentTokensTable = &schema.Table{
-		Name:       "document_tokens",
-		Columns:    DocumentTokensColumns,
-		PrimaryKey: []*schema.Column{DocumentTokensColumns[0]},
-		ForeignKeys: []*schema.ForeignKey{
-			{
-				Symbol:     "document_tokens_documents_document_tokens",
-				Columns:    []*schema.Column{DocumentTokensColumns[6]},
-				RefColumns: []*schema.Column{DocumentsColumns[0]},
-				OnDelete:   schema.Cascade,
-			},
-		},
-		Indexes: []*schema.Index{
-			{
-				Name:    "documenttoken_token",
-				Unique:  false,
-				Columns: []*schema.Column{DocumentTokensColumns[3]},
-			},
-		},
-	}
 	// GroupsColumns holds the columns for the "groups" table.
 	GroupsColumns = []*schema.Column{
 		{Name: "id", Type: field.TypeUUID},
@@ -349,6 +318,31 @@ var (
 			},
 		},
 	}
+	// MaintenanceEntriesColumns holds the columns for the "maintenance_entries" table.
+	MaintenanceEntriesColumns = []*schema.Column{
+		{Name: "id", Type: field.TypeUUID},
+		{Name: "created_at", Type: field.TypeTime},
+		{Name: "updated_at", Type: field.TypeTime},
+		{Name: "date", Type: field.TypeTime},
+		{Name: "name", Type: field.TypeString, Size: 255},
+		{Name: "description", Type: field.TypeString, Nullable: true, Size: 2500},
+		{Name: "cost", Type: field.TypeFloat64, Default: 0},
+		{Name: "item_id", Type: field.TypeUUID},
+	}
+	// MaintenanceEntriesTable holds the schema information for the "maintenance_entries" table.
+	MaintenanceEntriesTable = &schema.Table{
+		Name:       "maintenance_entries",
+		Columns:    MaintenanceEntriesColumns,
+		PrimaryKey: []*schema.Column{MaintenanceEntriesColumns[0]},
+		ForeignKeys: []*schema.ForeignKey{
+			{
+				Symbol:     "maintenance_entries_items_maintenance_entries",
+				Columns:    []*schema.Column{MaintenanceEntriesColumns[7]},
+				RefColumns: []*schema.Column{ItemsColumns[0]},
+				OnDelete:   schema.Cascade,
+			},
+		},
+	}
 	// UsersColumns holds the columns for the "users" table.
 	UsersColumns = []*schema.Column{
 		{Name: "id", Type: field.TypeUUID},
@@ -408,13 +402,13 @@ var (
 		AuthRolesTable,
 		AuthTokensTable,
 		DocumentsTable,
-		DocumentTokensTable,
 		GroupsTable,
 		GroupInvitationTokensTable,
 		ItemsTable,
 		ItemFieldsTable,
 		LabelsTable,
 		LocationsTable,
+		MaintenanceEntriesTable,
 		UsersTable,
 		LabelItemsTable,
 	}
@@ -426,7 +420,6 @@ func init() {
 	AuthRolesTable.ForeignKeys[0].RefTable = AuthTokensTable
 	AuthTokensTable.ForeignKeys[0].RefTable = UsersTable
 	DocumentsTable.ForeignKeys[0].RefTable = GroupsTable
-	DocumentTokensTable.ForeignKeys[0].RefTable = DocumentsTable
 	GroupInvitationTokensTable.ForeignKeys[0].RefTable = GroupsTable
 	ItemsTable.ForeignKeys[0].RefTable = GroupsTable
 	ItemsTable.ForeignKeys[1].RefTable = ItemsTable
@@ -435,6 +428,7 @@ func init() {
 	LabelsTable.ForeignKeys[0].RefTable = GroupsTable
 	LocationsTable.ForeignKeys[0].RefTable = GroupsTable
 	LocationsTable.ForeignKeys[1].RefTable = LocationsTable
+	MaintenanceEntriesTable.ForeignKeys[0].RefTable = ItemsTable
 	UsersTable.ForeignKeys[0].RefTable = GroupsTable
 	LabelItemsTable.ForeignKeys[0].RefTable = LabelsTable
 	LabelItemsTable.ForeignKeys[1].RefTable = ItemsTable
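
The new MaintenanceEntriesTable joins Tables and points its foreign key at ItemsTable with OnDelete set to Cascade, so deleting an Item also removes its maintenance entries at the database level. A generic auto-migration sketch, assuming a SQLite DSN and a registered sqlite3 driver; this is not necessarily how the project applies migrations, which live under internal/data/migrations:

    // openAndMigrate opens the database and lets ent create or update the
    // schema, including the new maintenance_entries table.
    func openAndMigrate(ctx context.Context, dsn string) (*ent.Client, error) {
        client, err := ent.Open("sqlite3", dsn) // e.g. "file:homebox.db?_fk=1"
        if err != nil {
            return nil, err
        }
        if err := client.Schema.Create(ctx); err != nil {
            client.Close()
            return nil, err
        }
        return client, nil
    }
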
diff --git a/backend/internal/data/ent/mutation.go b/backend/internal/data/ent/mutation.go
index ea9a441..da57310 100644
--- a/backend/internal/data/ent/mutation.go
+++ b/backend/internal/data/ent/mutation.go
@@ -14,13 +14,13 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/groupinvitationtoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/user"
 
@@ -40,13 +40,13 @@ const (
 	TypeAuthRoles            = "AuthRoles"
 	TypeAuthTokens           = "AuthTokens"
 	TypeDocument             = "Document"
-	TypeDocumentToken        = "DocumentToken"
 	TypeGroup                = "Group"
 	TypeGroupInvitationToken = "GroupInvitationToken"
 	TypeItem                 = "Item"
 	TypeItemField            = "ItemField"
 	TypeLabel                = "Label"
 	TypeLocation             = "Location"
+	TypeMaintenanceEntry     = "MaintenanceEntry"
 	TypeUser                 = "User"
 )
 
@@ -1587,25 +1587,22 @@ func (m *AuthTokensMutation) ResetEdge(name string) error {
 // DocumentMutation represents an operation that mutates the Document nodes in the graph.
 type DocumentMutation struct {
 	config
-	op                     Op
-	typ                    string
-	id                     *uuid.UUID
-	created_at             *time.Time
-	updated_at             *time.Time
-	title                  *string
-	_path                  *string
-	clearedFields          map[string]struct{}
-	group                  *uuid.UUID
-	clearedgroup           bool
-	document_tokens        map[uuid.UUID]struct{}
-	removeddocument_tokens map[uuid.UUID]struct{}
-	cleareddocument_tokens bool
-	attachments            map[uuid.UUID]struct{}
-	removedattachments     map[uuid.UUID]struct{}
-	clearedattachments     bool
-	done                   bool
-	oldValue               func(context.Context) (*Document, error)
-	predicates             []predicate.Document
+	op                 Op
+	typ                string
+	id                 *uuid.UUID
+	created_at         *time.Time
+	updated_at         *time.Time
+	title              *string
+	_path              *string
+	clearedFields      map[string]struct{}
+	group              *uuid.UUID
+	clearedgroup       bool
+	attachments        map[uuid.UUID]struct{}
+	removedattachments map[uuid.UUID]struct{}
+	clearedattachments bool
+	done               bool
+	oldValue           func(context.Context) (*Document, error)
+	predicates         []predicate.Document
 }
 
 var _ ent.Mutation = (*DocumentMutation)(nil)
@@ -1895,60 +1892,6 @@ func (m *DocumentMutation) ResetGroup() {
 	m.clearedgroup = false
 }
 
-// AddDocumentTokenIDs adds the "document_tokens" edge to the DocumentToken entity by ids.
-func (m *DocumentMutation) AddDocumentTokenIDs(ids ...uuid.UUID) {
-	if m.document_tokens == nil {
-		m.document_tokens = make(map[uuid.UUID]struct{})
-	}
-	for i := range ids {
-		m.document_tokens[ids[i]] = struct{}{}
-	}
-}
-
-// ClearDocumentTokens clears the "document_tokens" edge to the DocumentToken entity.
-func (m *DocumentMutation) ClearDocumentTokens() {
-	m.cleareddocument_tokens = true
-}
-
-// DocumentTokensCleared reports if the "document_tokens" edge to the DocumentToken entity was cleared.
-func (m *DocumentMutation) DocumentTokensCleared() bool {
-	return m.cleareddocument_tokens
-}
-
-// RemoveDocumentTokenIDs removes the "document_tokens" edge to the DocumentToken entity by IDs.
-func (m *DocumentMutation) RemoveDocumentTokenIDs(ids ...uuid.UUID) {
-	if m.removeddocument_tokens == nil {
-		m.removeddocument_tokens = make(map[uuid.UUID]struct{})
-	}
-	for i := range ids {
-		delete(m.document_tokens, ids[i])
-		m.removeddocument_tokens[ids[i]] = struct{}{}
-	}
-}
-
-// RemovedDocumentTokens returns the removed IDs of the "document_tokens" edge to the DocumentToken entity.
-func (m *DocumentMutation) RemovedDocumentTokensIDs() (ids []uuid.UUID) {
-	for id := range m.removeddocument_tokens {
-		ids = append(ids, id)
-	}
-	return
-}
-
-// DocumentTokensIDs returns the "document_tokens" edge IDs in the mutation.
-func (m *DocumentMutation) DocumentTokensIDs() (ids []uuid.UUID) {
-	for id := range m.document_tokens {
-		ids = append(ids, id)
-	}
-	return
-}
-
-// ResetDocumentTokens resets all changes to the "document_tokens" edge.
-func (m *DocumentMutation) ResetDocumentTokens() {
-	m.document_tokens = nil
-	m.cleareddocument_tokens = false
-	m.removeddocument_tokens = nil
-}
-
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by ids.
 func (m *DocumentMutation) AddAttachmentIDs(ids ...uuid.UUID) {
 	if m.attachments == nil {
@@ -2172,13 +2115,10 @@ func (m *DocumentMutation) ResetField(name string) error {
 
 // AddedEdges returns all edge names that were set/added in this mutation.
 func (m *DocumentMutation) AddedEdges() []string {
-	edges := make([]string, 0, 3)
+	edges := make([]string, 0, 2)
 	if m.group != nil {
 		edges = append(edges, document.EdgeGroup)
 	}
-	if m.document_tokens != nil {
-		edges = append(edges, document.EdgeDocumentTokens)
-	}
 	if m.attachments != nil {
 		edges = append(edges, document.EdgeAttachments)
 	}
@@ -2193,12 +2133,6 @@ func (m *DocumentMutation) AddedIDs(name string) []ent.Value {
 		if id := m.group; id != nil {
 			return []ent.Value{*id}
 		}
-	case document.EdgeDocumentTokens:
-		ids := make([]ent.Value, 0, len(m.document_tokens))
-		for id := range m.document_tokens {
-			ids = append(ids, id)
-		}
-		return ids
 	case document.EdgeAttachments:
 		ids := make([]ent.Value, 0, len(m.attachments))
 		for id := range m.attachments {
@@ -2211,10 +2145,7 @@ func (m *DocumentMutation) AddedIDs(name string) []ent.Value {
 
 // RemovedEdges returns all edge names that were removed in this mutation.
 func (m *DocumentMutation) RemovedEdges() []string {
-	edges := make([]string, 0, 3)
-	if m.removeddocument_tokens != nil {
-		edges = append(edges, document.EdgeDocumentTokens)
-	}
+	edges := make([]string, 0, 2)
 	if m.removedattachments != nil {
 		edges = append(edges, document.EdgeAttachments)
 	}
@@ -2225,12 +2156,6 @@ func (m *DocumentMutation) RemovedEdges() []string {
 // the given name in this mutation.
 func (m *DocumentMutation) RemovedIDs(name string) []ent.Value {
 	switch name {
-	case document.EdgeDocumentTokens:
-		ids := make([]ent.Value, 0, len(m.removeddocument_tokens))
-		for id := range m.removeddocument_tokens {
-			ids = append(ids, id)
-		}
-		return ids
 	case document.EdgeAttachments:
 		ids := make([]ent.Value, 0, len(m.removedattachments))
 		for id := range m.removedattachments {
@@ -2243,13 +2168,10 @@ func (m *DocumentMutation) RemovedIDs(name string) []ent.Value {
 
 // ClearedEdges returns all edge names that were cleared in this mutation.
 func (m *DocumentMutation) ClearedEdges() []string {
-	edges := make([]string, 0, 3)
+	edges := make([]string, 0, 2)
 	if m.clearedgroup {
 		edges = append(edges, document.EdgeGroup)
 	}
-	if m.cleareddocument_tokens {
-		edges = append(edges, document.EdgeDocumentTokens)
-	}
 	if m.clearedattachments {
 		edges = append(edges, document.EdgeAttachments)
 	}
@@ -2262,8 +2184,6 @@ func (m *DocumentMutation) EdgeCleared(name string) bool {
 	switch name {
 	case document.EdgeGroup:
 		return m.clearedgroup
-	case document.EdgeDocumentTokens:
-		return m.cleareddocument_tokens
 	case document.EdgeAttachments:
 		return m.clearedattachments
 	}
@@ -2288,9 +2208,6 @@ func (m *DocumentMutation) ResetEdge(name string) error {
 	case document.EdgeGroup:
 		m.ResetGroup()
 		return nil
-	case document.EdgeDocumentTokens:
-		m.ResetDocumentTokens()
-		return nil
 	case document.EdgeAttachments:
 		m.ResetAttachments()
 		return nil
@@ -2298,642 +2215,6 @@ func (m *DocumentMutation) ResetEdge(name string) error {
 	return fmt.Errorf("unknown Document edge %s", name)
 }
 
-// DocumentTokenMutation represents an operation that mutates the DocumentToken nodes in the graph.
-type DocumentTokenMutation struct {
-	config
-	op              Op
-	typ             string
-	id              *uuid.UUID
-	created_at      *time.Time
-	updated_at      *time.Time
-	token           *[]byte
-	uses            *int
-	adduses         *int
-	expires_at      *time.Time
-	clearedFields   map[string]struct{}
-	document        *uuid.UUID
-	cleareddocument bool
-	done            bool
-	oldValue        func(context.Context) (*DocumentToken, error)
-	predicates      []predicate.DocumentToken
-}
-
-var _ ent.Mutation = (*DocumentTokenMutation)(nil)
-
-// documenttokenOption allows management of the mutation configuration using functional options.
-type documenttokenOption func(*DocumentTokenMutation)
-
-// newDocumentTokenMutation creates new mutation for the DocumentToken entity.
-func newDocumentTokenMutation(c config, op Op, opts ...documenttokenOption) *DocumentTokenMutation {
-	m := &DocumentTokenMutation{
-		config:        c,
-		op:            op,
-		typ:           TypeDocumentToken,
-		clearedFields: make(map[string]struct{}),
-	}
-	for _, opt := range opts {
-		opt(m)
-	}
-	return m
-}
-
-// withDocumentTokenID sets the ID field of the mutation.
-func withDocumentTokenID(id uuid.UUID) documenttokenOption {
-	return func(m *DocumentTokenMutation) {
-		var (
-			err   error
-			once  sync.Once
-			value *DocumentToken
-		)
-		m.oldValue = func(ctx context.Context) (*DocumentToken, error) {
-			once.Do(func() {
-				if m.done {
-					err = errors.New("querying old values post mutation is not allowed")
-				} else {
-					value, err = m.Client().DocumentToken.Get(ctx, id)
-				}
-			})
-			return value, err
-		}
-		m.id = &id
-	}
-}
-
-// withDocumentToken sets the old DocumentToken of the mutation.
-func withDocumentToken(node *DocumentToken) documenttokenOption {
-	return func(m *DocumentTokenMutation) {
-		m.oldValue = func(context.Context) (*DocumentToken, error) {
-			return node, nil
-		}
-		m.id = &node.ID
-	}
-}
-
-// Client returns a new `ent.Client` from the mutation. If the mutation was
-// executed in a transaction (ent.Tx), a transactional client is returned.
-func (m DocumentTokenMutation) Client() *Client {
-	client := &Client{config: m.config}
-	client.init()
-	return client
-}
-
-// Tx returns an `ent.Tx` for mutations that were executed in transactions;
-// it returns an error otherwise.
-func (m DocumentTokenMutation) Tx() (*Tx, error) {
-	if _, ok := m.driver.(*txDriver); !ok {
-		return nil, errors.New("ent: mutation is not running in a transaction")
-	}
-	tx := &Tx{config: m.config}
-	tx.init()
-	return tx, nil
-}
-
-// SetID sets the value of the id field. Note that this
-// operation is only accepted on creation of DocumentToken entities.
-func (m *DocumentTokenMutation) SetID(id uuid.UUID) {
-	m.id = &id
-}
-
-// ID returns the ID value in the mutation. Note that the ID is only available
-// if it was provided to the builder or after it was returned from the database.
-func (m *DocumentTokenMutation) ID() (id uuid.UUID, exists bool) {
-	if m.id == nil {
-		return
-	}
-	return *m.id, true
-}
-
-// IDs queries the database and returns the entity ids that match the mutation's predicate.
-// That means, if the mutation is applied within a transaction with an isolation level such
-// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
-// or updated by the mutation.
-func (m *DocumentTokenMutation) IDs(ctx context.Context) ([]uuid.UUID, error) {
-	switch {
-	case m.op.Is(OpUpdateOne | OpDeleteOne):
-		id, exists := m.ID()
-		if exists {
-			return []uuid.UUID{id}, nil
-		}
-		fallthrough
-	case m.op.Is(OpUpdate | OpDelete):
-		return m.Client().DocumentToken.Query().Where(m.predicates...).IDs(ctx)
-	default:
-		return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
-	}
-}
-
-// SetCreatedAt sets the "created_at" field.
-func (m *DocumentTokenMutation) SetCreatedAt(t time.Time) {
-	m.created_at = &t
-}
-
-// CreatedAt returns the value of the "created_at" field in the mutation.
-func (m *DocumentTokenMutation) CreatedAt() (r time.Time, exists bool) {
-	v := m.created_at
-	if v == nil {
-		return
-	}
-	return *v, true
-}
-
-// OldCreatedAt returns the old "created_at" field's value of the DocumentToken entity.
-// If the DocumentToken object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *DocumentTokenMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) {
-	if !m.op.Is(OpUpdateOne) {
-		return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations")
-	}
-	if m.id == nil || m.oldValue == nil {
-		return v, errors.New("OldCreatedAt requires an ID field in the mutation")
-	}
-	oldValue, err := m.oldValue(ctx)
-	if err != nil {
-		return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err)
-	}
-	return oldValue.CreatedAt, nil
-}
-
-// ResetCreatedAt resets all changes to the "created_at" field.
-func (m *DocumentTokenMutation) ResetCreatedAt() {
-	m.created_at = nil
-}
-
-// SetUpdatedAt sets the "updated_at" field.
-func (m *DocumentTokenMutation) SetUpdatedAt(t time.Time) {
-	m.updated_at = &t
-}
-
-// UpdatedAt returns the value of the "updated_at" field in the mutation.
-func (m *DocumentTokenMutation) UpdatedAt() (r time.Time, exists bool) {
-	v := m.updated_at
-	if v == nil {
-		return
-	}
-	return *v, true
-}
-
-// OldUpdatedAt returns the old "updated_at" field's value of the DocumentToken entity.
-// If the DocumentToken object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *DocumentTokenMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) {
-	if !m.op.Is(OpUpdateOne) {
-		return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations")
-	}
-	if m.id == nil || m.oldValue == nil {
-		return v, errors.New("OldUpdatedAt requires an ID field in the mutation")
-	}
-	oldValue, err := m.oldValue(ctx)
-	if err != nil {
-		return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err)
-	}
-	return oldValue.UpdatedAt, nil
-}
-
-// ResetUpdatedAt resets all changes to the "updated_at" field.
-func (m *DocumentTokenMutation) ResetUpdatedAt() {
-	m.updated_at = nil
-}
-
-// SetToken sets the "token" field.
-func (m *DocumentTokenMutation) SetToken(b []byte) {
-	m.token = &b
-}
-
-// Token returns the value of the "token" field in the mutation.
-func (m *DocumentTokenMutation) Token() (r []byte, exists bool) {
-	v := m.token
-	if v == nil {
-		return
-	}
-	return *v, true
-}
-
-// OldToken returns the old "token" field's value of the DocumentToken entity.
-// If the DocumentToken object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *DocumentTokenMutation) OldToken(ctx context.Context) (v []byte, err error) {
-	if !m.op.Is(OpUpdateOne) {
-		return v, errors.New("OldToken is only allowed on UpdateOne operations")
-	}
-	if m.id == nil || m.oldValue == nil {
-		return v, errors.New("OldToken requires an ID field in the mutation")
-	}
-	oldValue, err := m.oldValue(ctx)
-	if err != nil {
-		return v, fmt.Errorf("querying old value for OldToken: %w", err)
-	}
-	return oldValue.Token, nil
-}
-
-// ResetToken resets all changes to the "token" field.
-func (m *DocumentTokenMutation) ResetToken() {
-	m.token = nil
-}
-
-// SetUses sets the "uses" field.
-func (m *DocumentTokenMutation) SetUses(i int) {
-	m.uses = &i
-	m.adduses = nil
-}
-
-// Uses returns the value of the "uses" field in the mutation.
-func (m *DocumentTokenMutation) Uses() (r int, exists bool) {
-	v := m.uses
-	if v == nil {
-		return
-	}
-	return *v, true
-}
-
-// OldUses returns the old "uses" field's value of the DocumentToken entity.
-// If the DocumentToken object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *DocumentTokenMutation) OldUses(ctx context.Context) (v int, err error) {
-	if !m.op.Is(OpUpdateOne) {
-		return v, errors.New("OldUses is only allowed on UpdateOne operations")
-	}
-	if m.id == nil || m.oldValue == nil {
-		return v, errors.New("OldUses requires an ID field in the mutation")
-	}
-	oldValue, err := m.oldValue(ctx)
-	if err != nil {
-		return v, fmt.Errorf("querying old value for OldUses: %w", err)
-	}
-	return oldValue.Uses, nil
-}
-
-// AddUses adds i to the "uses" field.
-func (m *DocumentTokenMutation) AddUses(i int) {
-	if m.adduses != nil {
-		*m.adduses += i
-	} else {
-		m.adduses = &i
-	}
-}
-
-// AddedUses returns the value that was added to the "uses" field in this mutation.
-func (m *DocumentTokenMutation) AddedUses() (r int, exists bool) {
-	v := m.adduses
-	if v == nil {
-		return
-	}
-	return *v, true
-}
-
-// ResetUses resets all changes to the "uses" field.
-func (m *DocumentTokenMutation) ResetUses() {
-	m.uses = nil
-	m.adduses = nil
-}
-
-// SetExpiresAt sets the "expires_at" field.
-func (m *DocumentTokenMutation) SetExpiresAt(t time.Time) {
-	m.expires_at = &t
-}
-
-// ExpiresAt returns the value of the "expires_at" field in the mutation.
-func (m *DocumentTokenMutation) ExpiresAt() (r time.Time, exists bool) {
-	v := m.expires_at
-	if v == nil {
-		return
-	}
-	return *v, true
-}
-
-// OldExpiresAt returns the old "expires_at" field's value of the DocumentToken entity.
-// If the DocumentToken object wasn't provided to the builder, the object is fetched from the database.
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
-func (m *DocumentTokenMutation) OldExpiresAt(ctx context.Context) (v time.Time, err error) {
-	if !m.op.Is(OpUpdateOne) {
-		return v, errors.New("OldExpiresAt is only allowed on UpdateOne operations")
-	}
-	if m.id == nil || m.oldValue == nil {
-		return v, errors.New("OldExpiresAt requires an ID field in the mutation")
-	}
-	oldValue, err := m.oldValue(ctx)
-	if err != nil {
-		return v, fmt.Errorf("querying old value for OldExpiresAt: %w", err)
-	}
-	return oldValue.ExpiresAt, nil
-}
-
-// ResetExpiresAt resets all changes to the "expires_at" field.
-func (m *DocumentTokenMutation) ResetExpiresAt() {
-	m.expires_at = nil
-}
-
-// SetDocumentID sets the "document" edge to the Document entity by id.
-func (m *DocumentTokenMutation) SetDocumentID(id uuid.UUID) {
-	m.document = &id
-}
-
-// ClearDocument clears the "document" edge to the Document entity.
-func (m *DocumentTokenMutation) ClearDocument() {
-	m.cleareddocument = true
-}
-
-// DocumentCleared reports if the "document" edge to the Document entity was cleared.
-func (m *DocumentTokenMutation) DocumentCleared() bool {
-	return m.cleareddocument
-}
-
-// DocumentID returns the "document" edge ID in the mutation.
-func (m *DocumentTokenMutation) DocumentID() (id uuid.UUID, exists bool) {
-	if m.document != nil {
-		return *m.document, true
-	}
-	return
-}
-
-// DocumentIDs returns the "document" edge IDs in the mutation.
-// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use
-// DocumentID instead. It exists only for internal usage by the builders.
-func (m *DocumentTokenMutation) DocumentIDs() (ids []uuid.UUID) {
-	if id := m.document; id != nil {
-		ids = append(ids, *id)
-	}
-	return
-}
-
-// ResetDocument resets all changes to the "document" edge.
-func (m *DocumentTokenMutation) ResetDocument() {
-	m.document = nil
-	m.cleareddocument = false
-}
-
-// Where appends a list predicates to the DocumentTokenMutation builder.
-func (m *DocumentTokenMutation) Where(ps ...predicate.DocumentToken) {
-	m.predicates = append(m.predicates, ps...)
-}
-
-// Op returns the operation name.
-func (m *DocumentTokenMutation) Op() Op {
-	return m.op
-}
-
-// Type returns the node type of this mutation (DocumentToken).
-func (m *DocumentTokenMutation) Type() string {
-	return m.typ
-}
-
-// Fields returns all fields that were changed during this mutation. Note that in
-// order to get all numeric fields that were incremented/decremented, call
-// AddedFields().
-func (m *DocumentTokenMutation) Fields() []string {
-	fields := make([]string, 0, 5)
-	if m.created_at != nil {
-		fields = append(fields, documenttoken.FieldCreatedAt)
-	}
-	if m.updated_at != nil {
-		fields = append(fields, documenttoken.FieldUpdatedAt)
-	}
-	if m.token != nil {
-		fields = append(fields, documenttoken.FieldToken)
-	}
-	if m.uses != nil {
-		fields = append(fields, documenttoken.FieldUses)
-	}
-	if m.expires_at != nil {
-		fields = append(fields, documenttoken.FieldExpiresAt)
-	}
-	return fields
-}
-
-// Field returns the value of a field with the given name. The second boolean
-// return value indicates that this field was not set, or was not defined in the
-// schema.
-func (m *DocumentTokenMutation) Field(name string) (ent.Value, bool) {
-	switch name {
-	case documenttoken.FieldCreatedAt:
-		return m.CreatedAt()
-	case documenttoken.FieldUpdatedAt:
-		return m.UpdatedAt()
-	case documenttoken.FieldToken:
-		return m.Token()
-	case documenttoken.FieldUses:
-		return m.Uses()
-	case documenttoken.FieldExpiresAt:
-		return m.ExpiresAt()
-	}
-	return nil, false
-}
-
-// OldField returns the old value of the field from the database. An error is
-// returned if the mutation operation is not UpdateOne, or the query to the
-// database failed.
-func (m *DocumentTokenMutation) OldField(ctx context.Context, name string) (ent.Value, error) {
-	switch name {
-	case documenttoken.FieldCreatedAt:
-		return m.OldCreatedAt(ctx)
-	case documenttoken.FieldUpdatedAt:
-		return m.OldUpdatedAt(ctx)
-	case documenttoken.FieldToken:
-		return m.OldToken(ctx)
-	case documenttoken.FieldUses:
-		return m.OldUses(ctx)
-	case documenttoken.FieldExpiresAt:
-		return m.OldExpiresAt(ctx)
-	}
-	return nil, fmt.Errorf("unknown DocumentToken field %s", name)
-}
-
-// SetField sets the value of a field with the given name. It returns an error if
-// the field is not defined in the schema, or if the type mismatched the field
-// type.
-func (m *DocumentTokenMutation) SetField(name string, value ent.Value) error {
-	switch name {
-	case documenttoken.FieldCreatedAt:
-		v, ok := value.(time.Time)
-		if !ok {
-			return fmt.Errorf("unexpected type %T for field %s", value, name)
-		}
-		m.SetCreatedAt(v)
-		return nil
-	case documenttoken.FieldUpdatedAt:
-		v, ok := value.(time.Time)
-		if !ok {
-			return fmt.Errorf("unexpected type %T for field %s", value, name)
-		}
-		m.SetUpdatedAt(v)
-		return nil
-	case documenttoken.FieldToken:
-		v, ok := value.([]byte)
-		if !ok {
-			return fmt.Errorf("unexpected type %T for field %s", value, name)
-		}
-		m.SetToken(v)
-		return nil
-	case documenttoken.FieldUses:
-		v, ok := value.(int)
-		if !ok {
-			return fmt.Errorf("unexpected type %T for field %s", value, name)
-		}
-		m.SetUses(v)
-		return nil
-	case documenttoken.FieldExpiresAt:
-		v, ok := value.(time.Time)
-		if !ok {
-			return fmt.Errorf("unexpected type %T for field %s", value, name)
-		}
-		m.SetExpiresAt(v)
-		return nil
-	}
-	return fmt.Errorf("unknown DocumentToken field %s", name)
-}
-
-// AddedFields returns all numeric fields that were incremented/decremented during
-// this mutation.
-func (m *DocumentTokenMutation) AddedFields() []string {
-	var fields []string
-	if m.adduses != nil {
-		fields = append(fields, documenttoken.FieldUses)
-	}
-	return fields
-}
-
-// AddedField returns the numeric value that was incremented/decremented on a field
-// with the given name. The second boolean return value indicates that this field
-// was not set, or was not defined in the schema.
-func (m *DocumentTokenMutation) AddedField(name string) (ent.Value, bool) {
-	switch name {
-	case documenttoken.FieldUses:
-		return m.AddedUses()
-	}
-	return nil, false
-}
-
-// AddField adds the value to the field with the given name. It returns an error if
-// the field is not defined in the schema, or if the type mismatched the field
-// type.
-func (m *DocumentTokenMutation) AddField(name string, value ent.Value) error {
-	switch name {
-	case documenttoken.FieldUses:
-		v, ok := value.(int)
-		if !ok {
-			return fmt.Errorf("unexpected type %T for field %s", value, name)
-		}
-		m.AddUses(v)
-		return nil
-	}
-	return fmt.Errorf("unknown DocumentToken numeric field %s", name)
-}
-
-// ClearedFields returns all nullable fields that were cleared during this
-// mutation.
-func (m *DocumentTokenMutation) ClearedFields() []string {
-	return nil
-}
-
-// FieldCleared returns a boolean indicating if a field with the given name was
-// cleared in this mutation.
-func (m *DocumentTokenMutation) FieldCleared(name string) bool {
-	_, ok := m.clearedFields[name]
-	return ok
-}
-
-// ClearField clears the value of the field with the given name. It returns an
-// error if the field is not defined in the schema.
-func (m *DocumentTokenMutation) ClearField(name string) error {
-	return fmt.Errorf("unknown DocumentToken nullable field %s", name)
-}
-
-// ResetField resets all changes in the mutation for the field with the given name.
-// It returns an error if the field is not defined in the schema.
-func (m *DocumentTokenMutation) ResetField(name string) error {
-	switch name {
-	case documenttoken.FieldCreatedAt:
-		m.ResetCreatedAt()
-		return nil
-	case documenttoken.FieldUpdatedAt:
-		m.ResetUpdatedAt()
-		return nil
-	case documenttoken.FieldToken:
-		m.ResetToken()
-		return nil
-	case documenttoken.FieldUses:
-		m.ResetUses()
-		return nil
-	case documenttoken.FieldExpiresAt:
-		m.ResetExpiresAt()
-		return nil
-	}
-	return fmt.Errorf("unknown DocumentToken field %s", name)
-}
-
-// AddedEdges returns all edge names that were set/added in this mutation.
-func (m *DocumentTokenMutation) AddedEdges() []string {
-	edges := make([]string, 0, 1)
-	if m.document != nil {
-		edges = append(edges, documenttoken.EdgeDocument)
-	}
-	return edges
-}
-
-// AddedIDs returns all IDs (to other nodes) that were added for the given edge
-// name in this mutation.
-func (m *DocumentTokenMutation) AddedIDs(name string) []ent.Value {
-	switch name {
-	case documenttoken.EdgeDocument:
-		if id := m.document; id != nil {
-			return []ent.Value{*id}
-		}
-	}
-	return nil
-}
-
-// RemovedEdges returns all edge names that were removed in this mutation.
-func (m *DocumentTokenMutation) RemovedEdges() []string {
-	edges := make([]string, 0, 1)
-	return edges
-}
-
-// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with
-// the given name in this mutation.
-func (m *DocumentTokenMutation) RemovedIDs(name string) []ent.Value {
-	return nil
-}
-
-// ClearedEdges returns all edge names that were cleared in this mutation.
-func (m *DocumentTokenMutation) ClearedEdges() []string {
-	edges := make([]string, 0, 1)
-	if m.cleareddocument {
-		edges = append(edges, documenttoken.EdgeDocument)
-	}
-	return edges
-}
-
-// EdgeCleared returns a boolean which indicates if the edge with the given name
-// was cleared in this mutation.
-func (m *DocumentTokenMutation) EdgeCleared(name string) bool {
-	switch name {
-	case documenttoken.EdgeDocument:
-		return m.cleareddocument
-	}
-	return false
-}
-
-// ClearEdge clears the value of the edge with the given name. It returns an error
-// if that edge is not defined in the schema.
-func (m *DocumentTokenMutation) ClearEdge(name string) error {
-	switch name {
-	case documenttoken.EdgeDocument:
-		m.ClearDocument()
-		return nil
-	}
-	return fmt.Errorf("unknown DocumentToken unique edge %s", name)
-}
-
-// ResetEdge resets all changes to the edge with the given name in this mutation.
-// It returns an error if the edge is not defined in the schema.
-func (m *DocumentTokenMutation) ResetEdge(name string) error {
-	switch name {
-	case documenttoken.EdgeDocument:
-		m.ResetDocument()
-		return nil
-	}
-	return fmt.Errorf("unknown DocumentToken edge %s", name)
-}
-
 // GroupMutation represents an operation that mutates the Group nodes in the graph.
 type GroupMutation struct {
 	config
@@ -4560,58 +3841,61 @@ func (m *GroupInvitationTokenMutation) ResetEdge(name string) error {
 // ItemMutation represents an operation that mutates the Item nodes in the graph.
 type ItemMutation struct {
 	config
-	op                 Op
-	typ                string
-	id                 *uuid.UUID
-	created_at         *time.Time
-	updated_at         *time.Time
-	name               *string
-	description        *string
-	import_ref         *string
-	notes              *string
-	quantity           *int
-	addquantity        *int
-	insured            *bool
-	archived           *bool
-	asset_id           *int
-	addasset_id        *int
-	serial_number      *string
-	model_number       *string
-	manufacturer       *string
-	lifetime_warranty  *bool
-	warranty_expires   *time.Time
-	warranty_details   *string
-	purchase_time      *time.Time
-	purchase_from      *string
-	purchase_price     *float64
-	addpurchase_price  *float64
-	sold_time          *time.Time
-	sold_to            *string
-	sold_price         *float64
-	addsold_price      *float64
-	sold_notes         *string
-	clearedFields      map[string]struct{}
-	parent             *uuid.UUID
-	clearedparent      bool
-	children           map[uuid.UUID]struct{}
-	removedchildren    map[uuid.UUID]struct{}
-	clearedchildren    bool
-	group              *uuid.UUID
-	clearedgroup       bool
-	label              map[uuid.UUID]struct{}
-	removedlabel       map[uuid.UUID]struct{}
-	clearedlabel       bool
-	location           *uuid.UUID
-	clearedlocation    bool
-	fields             map[uuid.UUID]struct{}
-	removedfields      map[uuid.UUID]struct{}
-	clearedfields      bool
-	attachments        map[uuid.UUID]struct{}
-	removedattachments map[uuid.UUID]struct{}
-	clearedattachments bool
-	done               bool
-	oldValue           func(context.Context) (*Item, error)
-	predicates         []predicate.Item
+	op                         Op
+	typ                        string
+	id                         *uuid.UUID
+	created_at                 *time.Time
+	updated_at                 *time.Time
+	name                       *string
+	description                *string
+	import_ref                 *string
+	notes                      *string
+	quantity                   *int
+	addquantity                *int
+	insured                    *bool
+	archived                   *bool
+	asset_id                   *int
+	addasset_id                *int
+	serial_number              *string
+	model_number               *string
+	manufacturer               *string
+	lifetime_warranty          *bool
+	warranty_expires           *time.Time
+	warranty_details           *string
+	purchase_time              *time.Time
+	purchase_from              *string
+	purchase_price             *float64
+	addpurchase_price          *float64
+	sold_time                  *time.Time
+	sold_to                    *string
+	sold_price                 *float64
+	addsold_price              *float64
+	sold_notes                 *string
+	clearedFields              map[string]struct{}
+	parent                     *uuid.UUID
+	clearedparent              bool
+	children                   map[uuid.UUID]struct{}
+	removedchildren            map[uuid.UUID]struct{}
+	clearedchildren            bool
+	group                      *uuid.UUID
+	clearedgroup               bool
+	label                      map[uuid.UUID]struct{}
+	removedlabel               map[uuid.UUID]struct{}
+	clearedlabel               bool
+	location                   *uuid.UUID
+	clearedlocation            bool
+	fields                     map[uuid.UUID]struct{}
+	removedfields              map[uuid.UUID]struct{}
+	clearedfields              bool
+	maintenance_entries        map[uuid.UUID]struct{}
+	removedmaintenance_entries map[uuid.UUID]struct{}
+	clearedmaintenance_entries bool
+	attachments                map[uuid.UUID]struct{}
+	removedattachments         map[uuid.UUID]struct{}
+	clearedattachments         bool
+	done                       bool
+	oldValue                   func(context.Context) (*Item, error)
+	predicates                 []predicate.Item
 }
 
 var _ ent.Mutation = (*ItemMutation)(nil)
@@ -6074,6 +5358,60 @@ func (m *ItemMutation) ResetFields() {
 	m.removedfields = nil
 }
 
+// AddMaintenanceEntryIDs adds the "maintenance_entries" edge to the MaintenanceEntry entity by ids.
+func (m *ItemMutation) AddMaintenanceEntryIDs(ids ...uuid.UUID) {
+	if m.maintenance_entries == nil {
+		m.maintenance_entries = make(map[uuid.UUID]struct{})
+	}
+	for i := range ids {
+		m.maintenance_entries[ids[i]] = struct{}{}
+	}
+}
+
+// ClearMaintenanceEntries clears the "maintenance_entries" edge to the MaintenanceEntry entity.
+func (m *ItemMutation) ClearMaintenanceEntries() {
+	m.clearedmaintenance_entries = true
+}
+
+// MaintenanceEntriesCleared reports if the "maintenance_entries" edge to the MaintenanceEntry entity was cleared.
+func (m *ItemMutation) MaintenanceEntriesCleared() bool {
+	return m.clearedmaintenance_entries
+}
+
+// RemoveMaintenanceEntryIDs removes the "maintenance_entries" edge to the MaintenanceEntry entity by IDs.
+func (m *ItemMutation) RemoveMaintenanceEntryIDs(ids ...uuid.UUID) {
+	if m.removedmaintenance_entries == nil {
+		m.removedmaintenance_entries = make(map[uuid.UUID]struct{})
+	}
+	for i := range ids {
+		delete(m.maintenance_entries, ids[i])
+		m.removedmaintenance_entries[ids[i]] = struct{}{}
+	}
+}
+
+// RemovedMaintenanceEntriesIDs returns the removed IDs of the "maintenance_entries" edge to the MaintenanceEntry entity.
+func (m *ItemMutation) RemovedMaintenanceEntriesIDs() (ids []uuid.UUID) {
+	for id := range m.removedmaintenance_entries {
+		ids = append(ids, id)
+	}
+	return
+}
+
+// MaintenanceEntriesIDs returns the "maintenance_entries" edge IDs in the mutation.
+func (m *ItemMutation) MaintenanceEntriesIDs() (ids []uuid.UUID) {
+	for id := range m.maintenance_entries {
+		ids = append(ids, id)
+	}
+	return
+}
+
+// ResetMaintenanceEntries resets all changes to the "maintenance_entries" edge.
+func (m *ItemMutation) ResetMaintenanceEntries() {
+	m.maintenance_entries = nil
+	m.clearedmaintenance_entries = false
+	m.removedmaintenance_entries = nil
+}
+
 // AddAttachmentIDs adds the "attachments" edge to the Attachment entity by ids.
 func (m *ItemMutation) AddAttachmentIDs(ids ...uuid.UUID) {
 	if m.attachments == nil {
@@ -6752,7 +6090,7 @@ func (m *ItemMutation) ResetField(name string) error {
 
 // AddedEdges returns all edge names that were set/added in this mutation.
 func (m *ItemMutation) AddedEdges() []string {
-	edges := make([]string, 0, 7)
+	edges := make([]string, 0, 8)
 	if m.parent != nil {
 		edges = append(edges, item.EdgeParent)
 	}
@@ -6771,6 +6109,9 @@ func (m *ItemMutation) AddedEdges() []string {
 	if m.fields != nil {
 		edges = append(edges, item.EdgeFields)
 	}
+	if m.maintenance_entries != nil {
+		edges = append(edges, item.EdgeMaintenanceEntries)
+	}
 	if m.attachments != nil {
 		edges = append(edges, item.EdgeAttachments)
 	}
@@ -6811,6 +6152,12 @@ func (m *ItemMutation) AddedIDs(name string) []ent.Value {
 			ids = append(ids, id)
 		}
 		return ids
+	case item.EdgeMaintenanceEntries:
+		ids := make([]ent.Value, 0, len(m.maintenance_entries))
+		for id := range m.maintenance_entries {
+			ids = append(ids, id)
+		}
+		return ids
 	case item.EdgeAttachments:
 		ids := make([]ent.Value, 0, len(m.attachments))
 		for id := range m.attachments {
@@ -6823,7 +6170,7 @@ func (m *ItemMutation) AddedIDs(name string) []ent.Value {
 
 // RemovedEdges returns all edge names that were removed in this mutation.
 func (m *ItemMutation) RemovedEdges() []string {
-	edges := make([]string, 0, 7)
+	edges := make([]string, 0, 8)
 	if m.removedchildren != nil {
 		edges = append(edges, item.EdgeChildren)
 	}
@@ -6833,6 +6180,9 @@ func (m *ItemMutation) RemovedEdges() []string {
 	if m.removedfields != nil {
 		edges = append(edges, item.EdgeFields)
 	}
+	if m.removedmaintenance_entries != nil {
+		edges = append(edges, item.EdgeMaintenanceEntries)
+	}
 	if m.removedattachments != nil {
 		edges = append(edges, item.EdgeAttachments)
 	}
@@ -6861,6 +6211,12 @@ func (m *ItemMutation) RemovedIDs(name string) []ent.Value {
 			ids = append(ids, id)
 		}
 		return ids
+	case item.EdgeMaintenanceEntries:
+		ids := make([]ent.Value, 0, len(m.removedmaintenance_entries))
+		for id := range m.removedmaintenance_entries {
+			ids = append(ids, id)
+		}
+		return ids
 	case item.EdgeAttachments:
 		ids := make([]ent.Value, 0, len(m.removedattachments))
 		for id := range m.removedattachments {
@@ -6873,7 +6229,7 @@ func (m *ItemMutation) RemovedIDs(name string) []ent.Value {
 
 // ClearedEdges returns all edge names that were cleared in this mutation.
 func (m *ItemMutation) ClearedEdges() []string {
-	edges := make([]string, 0, 7)
+	edges := make([]string, 0, 8)
 	if m.clearedparent {
 		edges = append(edges, item.EdgeParent)
 	}
@@ -6892,6 +6248,9 @@ func (m *ItemMutation) ClearedEdges() []string {
 	if m.clearedfields {
 		edges = append(edges, item.EdgeFields)
 	}
+	if m.clearedmaintenance_entries {
+		edges = append(edges, item.EdgeMaintenanceEntries)
+	}
 	if m.clearedattachments {
 		edges = append(edges, item.EdgeAttachments)
 	}
@@ -6914,6 +6273,8 @@ func (m *ItemMutation) EdgeCleared(name string) bool {
 		return m.clearedlocation
 	case item.EdgeFields:
 		return m.clearedfields
+	case item.EdgeMaintenanceEntries:
+		return m.clearedmaintenance_entries
 	case item.EdgeAttachments:
 		return m.clearedattachments
 	}
@@ -6959,6 +6320,9 @@ func (m *ItemMutation) ResetEdge(name string) error {
 	case item.EdgeFields:
 		m.ResetFields()
 		return nil
+	case item.EdgeMaintenanceEntries:
+		m.ResetMaintenanceEntries()
+		return nil
 	case item.EdgeAttachments:
 		m.ResetAttachments()
 		return nil
@@ -9400,6 +8764,758 @@ func (m *LocationMutation) ResetEdge(name string) error {
 	return fmt.Errorf("unknown Location edge %s", name)
 }
 
+// MaintenanceEntryMutation represents an operation that mutates the MaintenanceEntry nodes in the graph.
+type MaintenanceEntryMutation struct {
+	config
+	op            Op
+	typ           string
+	id            *uuid.UUID
+	created_at    *time.Time
+	updated_at    *time.Time
+	date          *time.Time
+	name          *string
+	description   *string
+	cost          *float64
+	addcost       *float64
+	clearedFields map[string]struct{}
+	item          *uuid.UUID
+	cleareditem   bool
+	done          bool
+	oldValue      func(context.Context) (*MaintenanceEntry, error)
+	predicates    []predicate.MaintenanceEntry
+}
+
+var _ ent.Mutation = (*MaintenanceEntryMutation)(nil)
+
+// maintenanceentryOption allows management of the mutation configuration using functional options.
+type maintenanceentryOption func(*MaintenanceEntryMutation)
+
+// newMaintenanceEntryMutation creates a new mutation for the MaintenanceEntry entity.
+func newMaintenanceEntryMutation(c config, op Op, opts ...maintenanceentryOption) *MaintenanceEntryMutation {
+	m := &MaintenanceEntryMutation{
+		config:        c,
+		op:            op,
+		typ:           TypeMaintenanceEntry,
+		clearedFields: make(map[string]struct{}),
+	}
+	for _, opt := range opts {
+		opt(m)
+	}
+	return m
+}
+
+// withMaintenanceEntryID sets the ID field of the mutation.
+func withMaintenanceEntryID(id uuid.UUID) maintenanceentryOption {
+	return func(m *MaintenanceEntryMutation) {
+		var (
+			err   error
+			once  sync.Once
+			value *MaintenanceEntry
+		)
+		m.oldValue = func(ctx context.Context) (*MaintenanceEntry, error) {
+			once.Do(func() {
+				if m.done {
+					err = errors.New("querying old values post mutation is not allowed")
+				} else {
+					value, err = m.Client().MaintenanceEntry.Get(ctx, id)
+				}
+			})
+			return value, err
+		}
+		m.id = &id
+	}
+}
+
+// withMaintenanceEntry sets the old MaintenanceEntry of the mutation.
+func withMaintenanceEntry(node *MaintenanceEntry) maintenanceentryOption {
+	return func(m *MaintenanceEntryMutation) {
+		m.oldValue = func(context.Context) (*MaintenanceEntry, error) {
+			return node, nil
+		}
+		m.id = &node.ID
+	}
+}
+
+// Client returns a new `ent.Client` from the mutation. If the mutation was
+// executed in a transaction (ent.Tx), a transactional client is returned.
+func (m MaintenanceEntryMutation) Client() *Client {
+	client := &Client{config: m.config}
+	client.init()
+	return client
+}
+
+// Tx returns an `ent.Tx` for mutations that were executed in transactions;
+// it returns an error otherwise.
+func (m MaintenanceEntryMutation) Tx() (*Tx, error) {
+	if _, ok := m.driver.(*txDriver); !ok {
+		return nil, errors.New("ent: mutation is not running in a transaction")
+	}
+	tx := &Tx{config: m.config}
+	tx.init()
+	return tx, nil
+}
+
+// SetID sets the value of the id field. Note that this
+// operation is only accepted on creation of MaintenanceEntry entities.
+func (m *MaintenanceEntryMutation) SetID(id uuid.UUID) {
+	m.id = &id
+}
+
+// ID returns the ID value in the mutation. Note that the ID is only available
+// if it was provided to the builder or after it was returned from the database.
+func (m *MaintenanceEntryMutation) ID() (id uuid.UUID, exists bool) {
+	if m.id == nil {
+		return
+	}
+	return *m.id, true
+}
+
+// IDs queries the database and returns the entity ids that match the mutation's predicate.
+// That means, if the mutation is applied within a transaction with an isolation level such
+// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
+// or deleted by the mutation.
+func (m *MaintenanceEntryMutation) IDs(ctx context.Context) ([]uuid.UUID, error) {
+	switch {
+	case m.op.Is(OpUpdateOne | OpDeleteOne):
+		id, exists := m.ID()
+		if exists {
+			return []uuid.UUID{id}, nil
+		}
+		fallthrough
+	case m.op.Is(OpUpdate | OpDelete):
+		return m.Client().MaintenanceEntry.Query().Where(m.predicates...).IDs(ctx)
+	default:
+		return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
+	}
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (m *MaintenanceEntryMutation) SetCreatedAt(t time.Time) {
+	m.created_at = &t
+}
+
+// CreatedAt returns the value of the "created_at" field in the mutation.
+func (m *MaintenanceEntryMutation) CreatedAt() (r time.Time, exists bool) {
+	v := m.created_at
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldCreatedAt returns the old "created_at" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldCreatedAt requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err)
+	}
+	return oldValue.CreatedAt, nil
+}
+
+// ResetCreatedAt resets all changes to the "created_at" field.
+func (m *MaintenanceEntryMutation) ResetCreatedAt() {
+	m.created_at = nil
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (m *MaintenanceEntryMutation) SetUpdatedAt(t time.Time) {
+	m.updated_at = &t
+}
+
+// UpdatedAt returns the value of the "updated_at" field in the mutation.
+func (m *MaintenanceEntryMutation) UpdatedAt() (r time.Time, exists bool) {
+	v := m.updated_at
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldUpdatedAt returns the old "updated_at" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldUpdatedAt requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err)
+	}
+	return oldValue.UpdatedAt, nil
+}
+
+// ResetUpdatedAt resets all changes to the "updated_at" field.
+func (m *MaintenanceEntryMutation) ResetUpdatedAt() {
+	m.updated_at = nil
+}
+
+// SetItemID sets the "item_id" field.
+func (m *MaintenanceEntryMutation) SetItemID(u uuid.UUID) {
+	m.item = &u
+}
+
+// ItemID returns the value of the "item_id" field in the mutation.
+func (m *MaintenanceEntryMutation) ItemID() (r uuid.UUID, exists bool) {
+	v := m.item
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldItemID returns the old "item_id" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldItemID(ctx context.Context) (v uuid.UUID, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldItemID is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldItemID requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldItemID: %w", err)
+	}
+	return oldValue.ItemID, nil
+}
+
+// ResetItemID resets all changes to the "item_id" field.
+func (m *MaintenanceEntryMutation) ResetItemID() {
+	m.item = nil
+}
+
+// SetDate sets the "date" field.
+func (m *MaintenanceEntryMutation) SetDate(t time.Time) {
+	m.date = &t
+}
+
+// Date returns the value of the "date" field in the mutation.
+func (m *MaintenanceEntryMutation) Date() (r time.Time, exists bool) {
+	v := m.date
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldDate returns the old "date" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldDate(ctx context.Context) (v time.Time, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldDate is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldDate requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldDate: %w", err)
+	}
+	return oldValue.Date, nil
+}
+
+// ResetDate resets all changes to the "date" field.
+func (m *MaintenanceEntryMutation) ResetDate() {
+	m.date = nil
+}
+
+// SetName sets the "name" field.
+func (m *MaintenanceEntryMutation) SetName(s string) {
+	m.name = &s
+}
+
+// Name returns the value of the "name" field in the mutation.
+func (m *MaintenanceEntryMutation) Name() (r string, exists bool) {
+	v := m.name
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldName returns the old "name" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldName(ctx context.Context) (v string, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldName is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldName requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldName: %w", err)
+	}
+	return oldValue.Name, nil
+}
+
+// ResetName resets all changes to the "name" field.
+func (m *MaintenanceEntryMutation) ResetName() {
+	m.name = nil
+}
+
+// SetDescription sets the "description" field.
+func (m *MaintenanceEntryMutation) SetDescription(s string) {
+	m.description = &s
+}
+
+// Description returns the value of the "description" field in the mutation.
+func (m *MaintenanceEntryMutation) Description() (r string, exists bool) {
+	v := m.description
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldDescription returns the old "description" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldDescription(ctx context.Context) (v string, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldDescription is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldDescription requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldDescription: %w", err)
+	}
+	return oldValue.Description, nil
+}
+
+// ClearDescription clears the value of the "description" field.
+func (m *MaintenanceEntryMutation) ClearDescription() {
+	m.description = nil
+	m.clearedFields[maintenanceentry.FieldDescription] = struct{}{}
+}
+
+// DescriptionCleared returns if the "description" field was cleared in this mutation.
+func (m *MaintenanceEntryMutation) DescriptionCleared() bool {
+	_, ok := m.clearedFields[maintenanceentry.FieldDescription]
+	return ok
+}
+
+// ResetDescription resets all changes to the "description" field.
+func (m *MaintenanceEntryMutation) ResetDescription() {
+	m.description = nil
+	delete(m.clearedFields, maintenanceentry.FieldDescription)
+}
+
+// SetCost sets the "cost" field.
+func (m *MaintenanceEntryMutation) SetCost(f float64) {
+	m.cost = &f
+	m.addcost = nil
+}
+
+// Cost returns the value of the "cost" field in the mutation.
+func (m *MaintenanceEntryMutation) Cost() (r float64, exists bool) {
+	v := m.cost
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldCost returns the old "cost" field's value of the MaintenanceEntry entity.
+// If the MaintenanceEntry object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MaintenanceEntryMutation) OldCost(ctx context.Context) (v float64, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldCost is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldCost requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldCost: %w", err)
+	}
+	return oldValue.Cost, nil
+}
+
+// AddCost adds f to the "cost" field.
+func (m *MaintenanceEntryMutation) AddCost(f float64) {
+	if m.addcost != nil {
+		*m.addcost += f
+	} else {
+		m.addcost = &f
+	}
+}
+
+// AddedCost returns the value that was added to the "cost" field in this mutation.
+func (m *MaintenanceEntryMutation) AddedCost() (r float64, exists bool) {
+	v := m.addcost
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// ResetCost resets all changes to the "cost" field.
+func (m *MaintenanceEntryMutation) ResetCost() {
+	m.cost = nil
+	m.addcost = nil
+}
+
+// ClearItem clears the "item" edge to the Item entity.
+func (m *MaintenanceEntryMutation) ClearItem() {
+	m.cleareditem = true
+}
+
+// ItemCleared reports if the "item" edge to the Item entity was cleared.
+func (m *MaintenanceEntryMutation) ItemCleared() bool {
+	return m.cleareditem
+}
+
+// ItemIDs returns the "item" edge IDs in the mutation.
+// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use
+// ItemID instead. It exists only for internal usage by the builders.
+func (m *MaintenanceEntryMutation) ItemIDs() (ids []uuid.UUID) {
+	if id := m.item; id != nil {
+		ids = append(ids, *id)
+	}
+	return
+}
+
+// ResetItem resets all changes to the "item" edge.
+func (m *MaintenanceEntryMutation) ResetItem() {
+	m.item = nil
+	m.cleareditem = false
+}
+
+// Where appends a list of predicates to the MaintenanceEntryMutation builder.
+func (m *MaintenanceEntryMutation) Where(ps ...predicate.MaintenanceEntry) {
+	m.predicates = append(m.predicates, ps...)
+}
+
+// Op returns the operation name.
+func (m *MaintenanceEntryMutation) Op() Op {
+	return m.op
+}
+
+// Type returns the node type of this mutation (MaintenanceEntry).
+func (m *MaintenanceEntryMutation) Type() string {
+	return m.typ
+}
+
+// Fields returns all fields that were changed during this mutation. Note that in
+// order to get all numeric fields that were incremented/decremented, call
+// AddedFields().
+func (m *MaintenanceEntryMutation) Fields() []string {
+	fields := make([]string, 0, 7)
+	if m.created_at != nil {
+		fields = append(fields, maintenanceentry.FieldCreatedAt)
+	}
+	if m.updated_at != nil {
+		fields = append(fields, maintenanceentry.FieldUpdatedAt)
+	}
+	if m.item != nil {
+		fields = append(fields, maintenanceentry.FieldItemID)
+	}
+	if m.date != nil {
+		fields = append(fields, maintenanceentry.FieldDate)
+	}
+	if m.name != nil {
+		fields = append(fields, maintenanceentry.FieldName)
+	}
+	if m.description != nil {
+		fields = append(fields, maintenanceentry.FieldDescription)
+	}
+	if m.cost != nil {
+		fields = append(fields, maintenanceentry.FieldCost)
+	}
+	return fields
+}
+
+// Field returns the value of a field with the given name. The second boolean
+// return value indicates that this field was not set, or was not defined in the
+// schema.
+func (m *MaintenanceEntryMutation) Field(name string) (ent.Value, bool) {
+	switch name {
+	case maintenanceentry.FieldCreatedAt:
+		return m.CreatedAt()
+	case maintenanceentry.FieldUpdatedAt:
+		return m.UpdatedAt()
+	case maintenanceentry.FieldItemID:
+		return m.ItemID()
+	case maintenanceentry.FieldDate:
+		return m.Date()
+	case maintenanceentry.FieldName:
+		return m.Name()
+	case maintenanceentry.FieldDescription:
+		return m.Description()
+	case maintenanceentry.FieldCost:
+		return m.Cost()
+	}
+	return nil, false
+}
+
+// OldField returns the old value of the field from the database. An error is
+// returned if the mutation operation is not UpdateOne, or the query to the
+// database failed.
+func (m *MaintenanceEntryMutation) OldField(ctx context.Context, name string) (ent.Value, error) {
+	switch name {
+	case maintenanceentry.FieldCreatedAt:
+		return m.OldCreatedAt(ctx)
+	case maintenanceentry.FieldUpdatedAt:
+		return m.OldUpdatedAt(ctx)
+	case maintenanceentry.FieldItemID:
+		return m.OldItemID(ctx)
+	case maintenanceentry.FieldDate:
+		return m.OldDate(ctx)
+	case maintenanceentry.FieldName:
+		return m.OldName(ctx)
+	case maintenanceentry.FieldDescription:
+		return m.OldDescription(ctx)
+	case maintenanceentry.FieldCost:
+		return m.OldCost(ctx)
+	}
+	return nil, fmt.Errorf("unknown MaintenanceEntry field %s", name)
+}
+
+// SetField sets the value of a field with the given name. It returns an error if
+// the field is not defined in the schema, or if the type mismatched the field
+// type.
+func (m *MaintenanceEntryMutation) SetField(name string, value ent.Value) error {
+	switch name {
+	case maintenanceentry.FieldCreatedAt:
+		v, ok := value.(time.Time)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetCreatedAt(v)
+		return nil
+	case maintenanceentry.FieldUpdatedAt:
+		v, ok := value.(time.Time)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetUpdatedAt(v)
+		return nil
+	case maintenanceentry.FieldItemID:
+		v, ok := value.(uuid.UUID)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetItemID(v)
+		return nil
+	case maintenanceentry.FieldDate:
+		v, ok := value.(time.Time)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetDate(v)
+		return nil
+	case maintenanceentry.FieldName:
+		v, ok := value.(string)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetName(v)
+		return nil
+	case maintenanceentry.FieldDescription:
+		v, ok := value.(string)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetDescription(v)
+		return nil
+	case maintenanceentry.FieldCost:
+		v, ok := value.(float64)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetCost(v)
+		return nil
+	}
+	return fmt.Errorf("unknown MaintenanceEntry field %s", name)
+}
+
+// AddedFields returns all numeric fields that were incremented/decremented during
+// this mutation.
+func (m *MaintenanceEntryMutation) AddedFields() []string {
+	var fields []string
+	if m.addcost != nil {
+		fields = append(fields, maintenanceentry.FieldCost)
+	}
+	return fields
+}
+
+// AddedField returns the numeric value that was incremented/decremented on a field
+// with the given name. The second boolean return value indicates that this field
+// was not set, or was not defined in the schema.
+func (m *MaintenanceEntryMutation) AddedField(name string) (ent.Value, bool) {
+	switch name {
+	case maintenanceentry.FieldCost:
+		return m.AddedCost()
+	}
+	return nil, false
+}
+
+// AddField adds the value to the field with the given name. It returns an error if
+// the field is not defined in the schema, or if the type mismatched the field
+// type.
+func (m *MaintenanceEntryMutation) AddField(name string, value ent.Value) error {
+	switch name {
+	case maintenanceentry.FieldCost:
+		v, ok := value.(float64)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.AddCost(v)
+		return nil
+	}
+	return fmt.Errorf("unknown MaintenanceEntry numeric field %s", name)
+}
+
+// ClearedFields returns all nullable fields that were cleared during this
+// mutation.
+func (m *MaintenanceEntryMutation) ClearedFields() []string {
+	var fields []string
+	if m.FieldCleared(maintenanceentry.FieldDescription) {
+		fields = append(fields, maintenanceentry.FieldDescription)
+	}
+	return fields
+}
+
+// FieldCleared returns a boolean indicating if a field with the given name was
+// cleared in this mutation.
+func (m *MaintenanceEntryMutation) FieldCleared(name string) bool {
+	_, ok := m.clearedFields[name]
+	return ok
+}
+
+// ClearField clears the value of the field with the given name. It returns an
+// error if the field is not defined in the schema.
+func (m *MaintenanceEntryMutation) ClearField(name string) error {
+	switch name {
+	case maintenanceentry.FieldDescription:
+		m.ClearDescription()
+		return nil
+	}
+	return fmt.Errorf("unknown MaintenanceEntry nullable field %s", name)
+}
+
+// ResetField resets all changes in the mutation for the field with the given name.
+// It returns an error if the field is not defined in the schema.
+func (m *MaintenanceEntryMutation) ResetField(name string) error {
+	switch name {
+	case maintenanceentry.FieldCreatedAt:
+		m.ResetCreatedAt()
+		return nil
+	case maintenanceentry.FieldUpdatedAt:
+		m.ResetUpdatedAt()
+		return nil
+	case maintenanceentry.FieldItemID:
+		m.ResetItemID()
+		return nil
+	case maintenanceentry.FieldDate:
+		m.ResetDate()
+		return nil
+	case maintenanceentry.FieldName:
+		m.ResetName()
+		return nil
+	case maintenanceentry.FieldDescription:
+		m.ResetDescription()
+		return nil
+	case maintenanceentry.FieldCost:
+		m.ResetCost()
+		return nil
+	}
+	return fmt.Errorf("unknown MaintenanceEntry field %s", name)
+}
+
+// AddedEdges returns all edge names that were set/added in this mutation.
+func (m *MaintenanceEntryMutation) AddedEdges() []string {
+	edges := make([]string, 0, 1)
+	if m.item != nil {
+		edges = append(edges, maintenanceentry.EdgeItem)
+	}
+	return edges
+}
+
+// AddedIDs returns all IDs (to other nodes) that were added for the given edge
+// name in this mutation.
+func (m *MaintenanceEntryMutation) AddedIDs(name string) []ent.Value {
+	switch name {
+	case maintenanceentry.EdgeItem:
+		if id := m.item; id != nil {
+			return []ent.Value{*id}
+		}
+	}
+	return nil
+}
+
+// RemovedEdges returns all edge names that were removed in this mutation.
+func (m *MaintenanceEntryMutation) RemovedEdges() []string {
+	edges := make([]string, 0, 1)
+	return edges
+}
+
+// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with
+// the given name in this mutation.
+func (m *MaintenanceEntryMutation) RemovedIDs(name string) []ent.Value {
+	return nil
+}
+
+// ClearedEdges returns all edge names that were cleared in this mutation.
+func (m *MaintenanceEntryMutation) ClearedEdges() []string {
+	edges := make([]string, 0, 1)
+	if m.cleareditem {
+		edges = append(edges, maintenanceentry.EdgeItem)
+	}
+	return edges
+}
+
+// EdgeCleared returns a boolean which indicates if the edge with the given name
+// was cleared in this mutation.
+func (m *MaintenanceEntryMutation) EdgeCleared(name string) bool {
+	switch name {
+	case maintenanceentry.EdgeItem:
+		return m.cleareditem
+	}
+	return false
+}
+
+// ClearEdge clears the value of the edge with the given name. It returns an error
+// if that edge is not defined in the schema.
+func (m *MaintenanceEntryMutation) ClearEdge(name string) error {
+	switch name {
+	case maintenanceentry.EdgeItem:
+		m.ClearItem()
+		return nil
+	}
+	return fmt.Errorf("unknown MaintenanceEntry unique edge %s", name)
+}
+
+// ResetEdge resets all changes to the edge with the given name in this mutation.
+// It returns an error if the edge is not defined in the schema.
+func (m *MaintenanceEntryMutation) ResetEdge(name string) error {
+	switch name {
+	case maintenanceentry.EdgeItem:
+		m.ResetItem()
+		return nil
+	}
+	return fmt.Errorf("unknown MaintenanceEntry edge %s", name)
+}
+
 // UserMutation represents an operation that mutates the User nodes in the graph.
 type UserMutation struct {
 	config
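For context on how the new MaintenanceEntryMutation added above is consumed: ent hands the mutation to any registered hooks before a change is persisted, and the accessors introduced in this hunk (Op, Name, Cost, and so on) are what a hook reads. The sketch below is not part of this patch; it assumes the regenerated helpers hook.On and hook.MaintenanceEntryFunc from backend/internal/data/ent/hook (also touched by this patch) and shows a minimal logging hook wired onto the client.

// Sketch only (not included in this patch): observing MaintenanceEntry
// mutations through a client-level hook, using the accessors defined above.
package observability // hypothetical package name

import (
	"context"
	"log"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
	"github.com/hay-kot/homebox/backend/internal/data/ent/hook"
)

// RegisterMaintenanceLogging logs every maintenance-entry create or update
// before it is written, then passes control to the next mutator in the chain.
func RegisterMaintenanceLogging(client *ent.Client) {
	client.Use(hook.On(
		func(next ent.Mutator) ent.Mutator {
			return hook.MaintenanceEntryFunc(func(ctx context.Context, m *ent.MaintenanceEntryMutation) (ent.Value, error) {
				name, _ := m.Name() // value staged on the mutation, if set
				cost, _ := m.Cost() // zero value when the field was not set
				log.Printf("maintenance entry %s: name=%q cost=%.2f", m.Op(), name, cost)
				return next.Mutate(ctx, m) // continue the mutation chain
			})
		},
		ent.OpCreate|ent.OpUpdateOne,
	))
}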
diff --git a/backend/internal/data/ent/predicate/predicate.go b/backend/internal/data/ent/predicate/predicate.go
index 21a0a71..b1fbe67 100644
--- a/backend/internal/data/ent/predicate/predicate.go
+++ b/backend/internal/data/ent/predicate/predicate.go
@@ -18,9 +18,6 @@ type AuthTokens func(*sql.Selector)
 // Document is the predicate function for document builders.
 type Document func(*sql.Selector)
 
-// DocumentToken is the predicate function for documenttoken builders.
-type DocumentToken func(*sql.Selector)
-
 // Group is the predicate function for group builders.
 type Group func(*sql.Selector)
 
@@ -39,5 +36,8 @@ type Label func(*sql.Selector)
 // Location is the predicate function for location builders.
 type Location func(*sql.Selector)
 
+// MaintenanceEntry is the predicate function for maintenanceentry builders.
+type MaintenanceEntry func(*sql.Selector)
+
 // User is the predicate function for user builders.
 type User func(*sql.Selector)
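The MaintenanceEntry predicate type added here is what the field helpers in the generated maintenanceentry package return, so it composes directly inside Where clauses. A minimal sketch, assuming the standard ent helpers for string and time fields (NameContainsFold, DateGTE) that the new maintenanceentry/where.go in this patch is expected to provide:

// Sketch only: filtering maintenance entries with the new predicate type.
package example // hypothetical package name

import (
	"context"
	"time"

	"github.com/hay-kot/homebox/backend/internal/data/ent"
	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
)

// RecentEntriesNamed returns entries from the last year whose name contains
// the given term (case-insensitive). Both helpers used in Where return a
// predicate.MaintenanceEntry, which the query builder accepts.
func RecentEntriesNamed(ctx context.Context, client *ent.Client, term string) ([]*ent.MaintenanceEntry, error) {
	return client.MaintenanceEntry.
		Query().
		Where(
			maintenanceentry.NameContainsFold(term),
			maintenanceentry.DateGTE(time.Now().AddDate(-1, 0, 0)),
		).
		All(ctx)
}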
diff --git a/backend/internal/data/ent/runtime.go b/backend/internal/data/ent/runtime.go
index a9edda6..4ce9d2c 100644
--- a/backend/internal/data/ent/runtime.go
+++ b/backend/internal/data/ent/runtime.go
@@ -9,13 +9,13 @@ import (
 	"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/group"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/groupinvitationtoken"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/item"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/label"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/location"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/schema"
 	"github.com/hay-kot/homebox/backend/internal/data/ent/user"
 )
@@ -123,37 +123,6 @@ func init() {
 	documentDescID := documentMixinFields0[0].Descriptor()
 	// document.DefaultID holds the default value on creation for the id field.
 	document.DefaultID = documentDescID.Default.(func() uuid.UUID)
-	documenttokenMixin := schema.DocumentToken{}.Mixin()
-	documenttokenMixinFields0 := documenttokenMixin[0].Fields()
-	_ = documenttokenMixinFields0
-	documenttokenFields := schema.DocumentToken{}.Fields()
-	_ = documenttokenFields
-	// documenttokenDescCreatedAt is the schema descriptor for created_at field.
-	documenttokenDescCreatedAt := documenttokenMixinFields0[1].Descriptor()
-	// documenttoken.DefaultCreatedAt holds the default value on creation for the created_at field.
-	documenttoken.DefaultCreatedAt = documenttokenDescCreatedAt.Default.(func() time.Time)
-	// documenttokenDescUpdatedAt is the schema descriptor for updated_at field.
-	documenttokenDescUpdatedAt := documenttokenMixinFields0[2].Descriptor()
-	// documenttoken.DefaultUpdatedAt holds the default value on creation for the updated_at field.
-	documenttoken.DefaultUpdatedAt = documenttokenDescUpdatedAt.Default.(func() time.Time)
-	// documenttoken.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
-	documenttoken.UpdateDefaultUpdatedAt = documenttokenDescUpdatedAt.UpdateDefault.(func() time.Time)
-	// documenttokenDescToken is the schema descriptor for token field.
-	documenttokenDescToken := documenttokenFields[0].Descriptor()
-	// documenttoken.TokenValidator is a validator for the "token" field. It is called by the builders before save.
-	documenttoken.TokenValidator = documenttokenDescToken.Validators[0].(func([]byte) error)
-	// documenttokenDescUses is the schema descriptor for uses field.
-	documenttokenDescUses := documenttokenFields[1].Descriptor()
-	// documenttoken.DefaultUses holds the default value on creation for the uses field.
-	documenttoken.DefaultUses = documenttokenDescUses.Default.(int)
-	// documenttokenDescExpiresAt is the schema descriptor for expires_at field.
-	documenttokenDescExpiresAt := documenttokenFields[2].Descriptor()
-	// documenttoken.DefaultExpiresAt holds the default value on creation for the expires_at field.
-	documenttoken.DefaultExpiresAt = documenttokenDescExpiresAt.Default.(func() time.Time)
-	// documenttokenDescID is the schema descriptor for id field.
-	documenttokenDescID := documenttokenMixinFields0[0].Descriptor()
-	// documenttoken.DefaultID holds the default value on creation for the id field.
-	documenttoken.DefaultID = documenttokenDescID.Default.(func() uuid.UUID)
 	groupMixin := schema.Group{}.Mixin()
 	groupMixinFields0 := groupMixin[0].Fields()
 	_ = groupMixinFields0
@@ -462,6 +431,55 @@ func init() {
 	locationDescID := locationMixinFields0[0].Descriptor()
 	// location.DefaultID holds the default value on creation for the id field.
 	location.DefaultID = locationDescID.Default.(func() uuid.UUID)
+	maintenanceentryMixin := schema.MaintenanceEntry{}.Mixin()
+	maintenanceentryMixinFields0 := maintenanceentryMixin[0].Fields()
+	_ = maintenanceentryMixinFields0
+	maintenanceentryFields := schema.MaintenanceEntry{}.Fields()
+	_ = maintenanceentryFields
+	// maintenanceentryDescCreatedAt is the schema descriptor for created_at field.
+	maintenanceentryDescCreatedAt := maintenanceentryMixinFields0[1].Descriptor()
+	// maintenanceentry.DefaultCreatedAt holds the default value on creation for the created_at field.
+	maintenanceentry.DefaultCreatedAt = maintenanceentryDescCreatedAt.Default.(func() time.Time)
+	// maintenanceentryDescUpdatedAt is the schema descriptor for updated_at field.
+	maintenanceentryDescUpdatedAt := maintenanceentryMixinFields0[2].Descriptor()
+	// maintenanceentry.DefaultUpdatedAt holds the default value on creation for the updated_at field.
+	maintenanceentry.DefaultUpdatedAt = maintenanceentryDescUpdatedAt.Default.(func() time.Time)
+	// maintenanceentry.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
+	maintenanceentry.UpdateDefaultUpdatedAt = maintenanceentryDescUpdatedAt.UpdateDefault.(func() time.Time)
+	// maintenanceentryDescDate is the schema descriptor for date field.
+	maintenanceentryDescDate := maintenanceentryFields[1].Descriptor()
+	// maintenanceentry.DefaultDate holds the default value on creation for the date field.
+	maintenanceentry.DefaultDate = maintenanceentryDescDate.Default.(func() time.Time)
+	// maintenanceentryDescName is the schema descriptor for name field.
+	maintenanceentryDescName := maintenanceentryFields[2].Descriptor()
+	// maintenanceentry.NameValidator is a validator for the "name" field. It is called by the builders before save.
+	maintenanceentry.NameValidator = func() func(string) error {
+		validators := maintenanceentryDescName.Validators
+		fns := [...]func(string) error{
+			validators[0].(func(string) error),
+			validators[1].(func(string) error),
+		}
+		return func(name string) error {
+			for _, fn := range fns {
+				if err := fn(name); err != nil {
+					return err
+				}
+			}
+			return nil
+		}
+	}()
+	// maintenanceentryDescDescription is the schema descriptor for description field.
+	maintenanceentryDescDescription := maintenanceentryFields[3].Descriptor()
+	// maintenanceentry.DescriptionValidator is a validator for the "description" field. It is called by the builders before save.
+	maintenanceentry.DescriptionValidator = maintenanceentryDescDescription.Validators[0].(func(string) error)
+	// maintenanceentryDescCost is the schema descriptor for cost field.
+	maintenanceentryDescCost := maintenanceentryFields[4].Descriptor()
+	// maintenanceentry.DefaultCost holds the default value on creation for the cost field.
+	maintenanceentry.DefaultCost = maintenanceentryDescCost.Default.(float64)
+	// maintenanceentryDescID is the schema descriptor for id field.
+	maintenanceentryDescID := maintenanceentryMixinFields0[0].Descriptor()
+	// maintenanceentry.DefaultID holds the default value on creation for the id field.
+	maintenanceentry.DefaultID = maintenanceentryDescID.Default.(func() uuid.UUID)
 	userMixin := schema.User{}.Mixin()
 	userMixinFields0 := userMixin[0].Fields()
 	_ = userMixinFields0
diff --git a/backend/internal/data/ent/schema/auth_tokens.go b/backend/internal/data/ent/schema/auth_tokens.go
index e29b79a..71b22d7 100644
--- a/backend/internal/data/ent/schema/auth_tokens.go
+++ b/backend/internal/data/ent/schema/auth_tokens.go
@@ -4,6 +4,7 @@ import (
 	"time"
 
 	"entgo.io/ent"
+	"entgo.io/ent/dialect/entsql"
 	"entgo.io/ent/schema/edge"
 	"entgo.io/ent/schema/field"
 	"entgo.io/ent/schema/index"
@@ -38,7 +39,10 @@ func (AuthTokens) Edges() []ent.Edge {
 			Ref("auth_tokens").
 			Unique(),
 		edge.To("roles", AuthRoles.Type).
-			Unique(),
+			Unique().
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
 	}
 }
 
diff --git a/backend/internal/data/ent/schema/document.go b/backend/internal/data/ent/schema/document.go
index 2293c39..a2c26e2 100644
--- a/backend/internal/data/ent/schema/document.go
+++ b/backend/internal/data/ent/schema/document.go
@@ -38,10 +38,6 @@ func (Document) Edges() []ent.Edge {
 			Ref("documents").
 			Required().
 			Unique(),
-		edge.To("document_tokens", DocumentToken.Type).
-			Annotations(entsql.Annotation{
-				OnDelete: entsql.Cascade,
-			}),
 		edge.To("attachments", Attachment.Type).
 			Annotations(entsql.Annotation{
 				OnDelete: entsql.Cascade,
diff --git a/backend/internal/data/ent/schema/document_token.go b/backend/internal/data/ent/schema/document_token.go
deleted file mode 100644
index c5ec72f..0000000
--- a/backend/internal/data/ent/schema/document_token.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package schema
-
-import (
-	"time"
-
-	"entgo.io/ent"
-	"entgo.io/ent/schema/edge"
-	"entgo.io/ent/schema/field"
-	"entgo.io/ent/schema/index"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
-)
-
-// DocumentToken holds the schema definition for the DocumentToken entity.
-type DocumentToken struct {
-	ent.Schema
-}
-
-func (DocumentToken) Mixin() []ent.Mixin {
-	return []ent.Mixin{
-		mixins.BaseMixin{},
-	}
-}
-
-// Fields of the DocumentToken.
-func (DocumentToken) Fields() []ent.Field {
-	return []ent.Field{
-		field.Bytes("token").
-			NotEmpty().
-			Unique(),
-		field.Int("uses").
-			Default(1),
-		field.Time("expires_at").
-			Default(func() time.Time { return time.Now().Add(time.Minute * 10) }),
-	}
-}
-
-// Edges of the DocumentToken.
-func (DocumentToken) Edges() []ent.Edge {
-	return []ent.Edge{
-		edge.From("document", Document.Type).
-			Ref("document_tokens").
-			Unique(),
-	}
-}
-
-func (DocumentToken) Indexes() []ent.Index {
-	return []ent.Index{
-		index.Fields("token"),
-	}
-}
diff --git a/backend/internal/data/ent/schema/item.go b/backend/internal/data/ent/schema/item.go
index 388566d..5180f27 100644
--- a/backend/internal/data/ent/schema/item.go
+++ b/backend/internal/data/ent/schema/item.go
@@ -116,6 +116,10 @@ func (Item) Edges() []ent.Edge {
 			Annotations(entsql.Annotation{
 				OnDelete: entsql.Cascade,
 			}),
+		edge.To("maintenance_entries", MaintenanceEntry.Type).
+			Annotations(entsql.Annotation{
+				OnDelete: entsql.Cascade,
+			}),
 		edge.To("attachments", Attachment.Type).
 			Annotations(entsql.Annotation{
 				OnDelete: entsql.Cascade,
diff --git a/backend/internal/data/ent/schema/maintenance_entry.go b/backend/internal/data/ent/schema/maintenance_entry.go
new file mode 100644
index 0000000..7fd9643
--- /dev/null
+++ b/backend/internal/data/ent/schema/maintenance_entry.go
@@ -0,0 +1,48 @@
+package schema
+
+import (
+	"time"
+
+	"entgo.io/ent"
+	"entgo.io/ent/schema/edge"
+	"entgo.io/ent/schema/field"
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
+)
+
+type MaintenanceEntry struct {
+	ent.Schema
+}
+
+func (MaintenanceEntry) Mixin() []ent.Mixin {
+	return []ent.Mixin{
+		mixins.BaseMixin{},
+	}
+}
+
+func (MaintenanceEntry) Fields() []ent.Field {
+	return []ent.Field{
+		field.UUID("item_id", uuid.UUID{}),
+		field.Time("date").
+			Default(time.Now),
+		field.String("name").
+			MaxLen(255).
+			NotEmpty(),
+		field.String("description").
+			MaxLen(2500).
+			Optional(),
+		field.Float("cost").
+			Default(0.0),
+	}
+}
+
+// Edges of the MaintenanceEntry.
+func (MaintenanceEntry) Edges() []ent.Edge {
+	return []ent.Edge{
+		edge.From("item", Item.Type).
+			Field("item_id").
+			Ref("maintenance_entries").
+			Required().
+			Unique(),
+	}
+}
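
For orientation, a minimal sketch (illustrative, not part of the patch) of how the generated ent builders for this schema get exercised: binding the item edge to the `item_id` field is what makes `SetItemID` and the `maintenanceentry.ItemID` predicate available, mirroring the repository code added later in this diff. The helper name and sample values below are made up.

```go
package example

import (
	"context"
	"time"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/ent"
	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
)

// createAndList creates one entry for an item, then lists that item's entries
// newest-first via the predicate generated for the bound "item_id" field.
func createAndList(ctx context.Context, client *ent.Client, itemID uuid.UUID) ([]*ent.MaintenanceEntry, error) {
	_, err := client.MaintenanceEntry.Create().
		SetItemID(itemID). // available because the edge is bound to "item_id"
		SetName("Oil change").
		SetDate(time.Now()).
		SetCost(49.99).
		Save(ctx)
	if err != nil {
		return nil, err
	}

	return client.MaintenanceEntry.Query().
		Where(maintenanceentry.ItemID(itemID)).
		Order(ent.Desc(maintenanceentry.FieldDate)).
		All(ctx)
}
```
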
diff --git a/backend/internal/data/ent/schema/templates/has_id.tmpl b/backend/internal/data/ent/schema/templates/has_id.tmpl
index 42b0cd8..cc6e30a 100644
--- a/backend/internal/data/ent/schema/templates/has_id.tmpl
+++ b/backend/internal/data/ent/schema/templates/has_id.tmpl
@@ -9,8 +9,13 @@
 import 	"github.com/google/uuid"
 {{/* Loop over all nodes and implement the "HasID" interface */}}
 {{ range $n := $.Nodes }}
+    {{ if not $n.ID }}
+        {{/* If the node doesn't have an ID field, we skip it. */}}
+        {{ continue }}
+    {{ end }}
+    {{/* The "HasID" interface is implemented by the generated "GetID" method. */}}
     {{ $receiver := $n.Receiver }}
-    func ({{ $receiver }} *{{ $n.Name }}) GetID() uuid.UUID {
+    func ({{ $receiver }} *{{ $n.Name }}) GetID() {{ $n.ID.Type }} {
         return {{ $receiver }}.ID
     }
 {{ end }}
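
The template now skips nodes without an ID field and emits the node's own ID type instead of a hard-coded `uuid.UUID`. Roughly, the generated methods are expected to take this shape (the two node types below are hypothetical stand-ins; in this codebase `AuthRoles` is the integer-keyed table):

```go
package example

import "github.com/google/uuid"

// Stand-in node types for illustration only.
type MaintenanceEntry struct{ ID uuid.UUID }
type AuthRoles struct{ ID int }

// UUID-keyed nodes keep the old signature...
func (me *MaintenanceEntry) GetID() uuid.UUID { return me.ID }

// ...while integer-keyed nodes now get a matching return type.
func (ar *AuthRoles) GetID() int { return ar.ID }
```
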
diff --git a/backend/internal/data/ent/tx.go b/backend/internal/data/ent/tx.go
index 8af5b01..0703ce5 100644
--- a/backend/internal/data/ent/tx.go
+++ b/backend/internal/data/ent/tx.go
@@ -20,8 +20,6 @@ type Tx struct {
 	AuthTokens *AuthTokensClient
 	// Document is the client for interacting with the Document builders.
 	Document *DocumentClient
-	// DocumentToken is the client for interacting with the DocumentToken builders.
-	DocumentToken *DocumentTokenClient
 	// Group is the client for interacting with the Group builders.
 	Group *GroupClient
 	// GroupInvitationToken is the client for interacting with the GroupInvitationToken builders.
@@ -34,6 +32,8 @@ type Tx struct {
 	Label *LabelClient
 	// Location is the client for interacting with the Location builders.
 	Location *LocationClient
+	// MaintenanceEntry is the client for interacting with the MaintenanceEntry builders.
+	MaintenanceEntry *MaintenanceEntryClient
 	// User is the client for interacting with the User builders.
 	User *UserClient
 
@@ -171,13 +171,13 @@ func (tx *Tx) init() {
 	tx.AuthRoles = NewAuthRolesClient(tx.config)
 	tx.AuthTokens = NewAuthTokensClient(tx.config)
 	tx.Document = NewDocumentClient(tx.config)
-	tx.DocumentToken = NewDocumentTokenClient(tx.config)
 	tx.Group = NewGroupClient(tx.config)
 	tx.GroupInvitationToken = NewGroupInvitationTokenClient(tx.config)
 	tx.Item = NewItemClient(tx.config)
 	tx.ItemField = NewItemFieldClient(tx.config)
 	tx.Label = NewLabelClient(tx.config)
 	tx.Location = NewLocationClient(tx.config)
+	tx.MaintenanceEntry = NewMaintenanceEntryClient(tx.config)
 	tx.User = NewUserClient(tx.config)
 }
 
diff --git a/backend/internal/data/migrations/migrations.go b/backend/internal/data/migrations/migrations.go
index 5fcb8e3..fba84c5 100644
--- a/backend/internal/data/migrations/migrations.go
+++ b/backend/internal/data/migrations/migrations.go
@@ -6,7 +6,7 @@ import (
 	"path/filepath"
 )
 
-// go:embed all:migrations
+//go:embed all:migrations
 var Files embed.FS
 
 // Write writes the embedded migrations to a temporary directory.
@@ -18,7 +18,7 @@ func Write(temp string) error {
 		return err
 	}
 
-	fsDir, err := Files.ReadDir(".")
+	fsDir, err := Files.ReadDir("migrations")
 	if err != nil {
 		return err
 	}
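
Two separate fixes land here: `// go:embed` (with a space) is an ordinary comment, so nothing was actually embedded, whereas `//go:embed all:migrations` is the real compiler directive; and because the embedded FS keeps the directory prefix, the SQL files live under `migrations/`, hence `ReadDir("migrations")` rather than `ReadDir(".")`. A small standalone sketch of the same pattern (it assumes a `sqlfiles/` directory exists next to the source file; that name is made up):

```go
package example

import (
	"embed"
	"fmt"
)

// Note the lack of a space: "//go:embed" is a directive, "// go:embed" is not.
//
//go:embed all:sqlfiles
var files embed.FS

func listMigrations() error {
	// The FS is rooted at the package directory, so the entries sit under
	// the embedded folder name, not at ".".
	entries, err := files.ReadDir("sqlfiles")
	if err != nil {
		return err
	}
	for _, e := range entries {
		fmt.Println(e.Name())
	}
	return nil
}
```
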
diff --git a/backend/internal/data/migrations/migrations/20221205230404_drop_document_tokens.sql b/backend/internal/data/migrations/migrations/20221205230404_drop_document_tokens.sql
new file mode 100644
index 0000000..e130abe
--- /dev/null
+++ b/backend/internal/data/migrations/migrations/20221205230404_drop_document_tokens.sql
@@ -0,0 +1,5 @@
+-- disable the enforcement of foreign-keys constraints
+PRAGMA foreign_keys = off;
+DROP TABLE `document_tokens`;
+-- enable back the enforcement of foreign-keys constraints
+PRAGMA foreign_keys = on;
\ No newline at end of file
diff --git a/backend/internal/data/migrations/migrations/20221205234214_add_maintenance_entries.sql b/backend/internal/data/migrations/migrations/20221205234214_add_maintenance_entries.sql
new file mode 100644
index 0000000..2491ec4
--- /dev/null
+++ b/backend/internal/data/migrations/migrations/20221205234214_add_maintenance_entries.sql
@@ -0,0 +1,2 @@
+-- create "maintenance_entries" table
+CREATE TABLE `maintenance_entries` (`id` uuid NOT NULL, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `date` datetime NOT NULL, `name` text NOT NULL, `description` text NULL, `cost` real NOT NULL DEFAULT 0, `item_id` uuid NOT NULL, PRIMARY KEY (`id`), CONSTRAINT `maintenance_entries_items_maintenance_entries` FOREIGN KEY (`item_id`) REFERENCES `items` (`id`) ON DELETE CASCADE);
diff --git a/backend/internal/data/migrations/migrations/20221205234812_cascade_delete_roles.sql b/backend/internal/data/migrations/migrations/20221205234812_cascade_delete_roles.sql
new file mode 100644
index 0000000..8a37c11
--- /dev/null
+++ b/backend/internal/data/migrations/migrations/20221205234812_cascade_delete_roles.sql
@@ -0,0 +1,16 @@
+-- disable the enforcement of foreign-keys constraints
+PRAGMA foreign_keys = off;
+-- create "new_auth_roles" table
+CREATE TABLE `new_auth_roles` (`id` integer NOT NULL PRIMARY KEY AUTOINCREMENT, `role` text NOT NULL DEFAULT 'user', `auth_tokens_roles` uuid NULL, CONSTRAINT `auth_roles_auth_tokens_roles` FOREIGN KEY (`auth_tokens_roles`) REFERENCES `auth_tokens` (`id`) ON DELETE CASCADE);
+-- copy rows from old table "auth_roles" to new temporary table "new_auth_roles"
+INSERT INTO `new_auth_roles` (`id`, `role`, `auth_tokens_roles`) SELECT `id`, `role`, `auth_tokens_roles` FROM `auth_roles`;
+-- drop "auth_roles" table after copying rows
+DROP TABLE `auth_roles`;
+-- rename temporary table "new_auth_roles" to "auth_roles"
+ALTER TABLE `new_auth_roles` RENAME TO `auth_roles`;
+-- create index "auth_roles_auth_tokens_roles_key" to table: "auth_roles"
+CREATE UNIQUE INDEX `auth_roles_auth_tokens_roles_key` ON `auth_roles` (`auth_tokens_roles`);
+-- delete where tokens is null
+DELETE FROM `auth_roles` WHERE `auth_tokens_roles` IS NULL;
+-- enable back the enforcement of foreign-keys constraints
+PRAGMA foreign_keys = on;
diff --git a/backend/internal/data/migrations/migrations/atlas.sum b/backend/internal/data/migrations/migrations/atlas.sum
index 0c9927b..5de79cc 100644
--- a/backend/internal/data/migrations/migrations/atlas.sum
+++ b/backend/internal/data/migrations/migrations/atlas.sum
@@ -1,4 +1,4 @@
-h1:oo2QbYbKkbf4oTfkRXqo9XGPp8S76j33WQvDZITv5s8=
+h1:dn3XsqwgjCxEtpLXmHlt2ALRwg2cZB6m8lg2faxeLXM=
 20220929052825_init.sql h1:ZlCqm1wzjDmofeAcSX3jE4h4VcdTNGpRg2eabztDy9Q=
 20221001210956_group_invitations.sql h1:YQKJFtE39wFOcRNbZQ/d+ZlHwrcfcsZlcv/pLEYdpjw=
 20221009173029_add_user_roles.sql h1:vWmzAfgEWQeGk0Vn70zfVPCcfEZth3E0JcvyKTjpYyU=
@@ -6,3 +6,6 @@ h1:oo2QbYbKkbf4oTfkRXqo9XGPp8S76j33WQvDZITv5s8=
 20221101041931_add_archived_field.sql h1:L2WxiOh1svRn817cNURgqnEQg6DIcodZ1twK4tvxW94=
 20221113012312_add_asset_id_field.sql h1:DjD7e1PS8OfxGBWic8h0nO/X6CNnHEMqQjDCaaQ3M3Q=
 20221203053132_add_token_roles.sql h1:wFTIh+KBoHfLfy/L0ZmJz4cNXKHdACG9ZK/yvVKjF0M=
+20221205230404_drop_document_tokens.sql h1:9dCbNFcjtsT6lEhkxCn/vYaGRmQrl1LefdEJgvkfhGg=
+20221205234214_add_maintenance_entries.sql h1:B56VzCuDsed1k3/sYUoKlOkP90DcdLufxFK0qYvoafU=
+20221205234812_cascade_delete_roles.sql h1:VIiaImR48nCHF3uFbOYOX1E79Ta5HsUBetGaSAbh9Gk=
diff --git a/backend/internal/data/repo/map_helpers.go b/backend/internal/data/repo/map_helpers.go
index a9c0bca..9404cb0 100644
--- a/backend/internal/data/repo/map_helpers.go
+++ b/backend/internal/data/repo/map_helpers.go
@@ -16,17 +16,16 @@ func mapTErrFunc[T any, Y any](fn func(T) Y) func(T, error) (Y, error) {
 	}
 }
 
-// TODO: Future Usage
-// func mapEachFunc[T any, Y any](fn func(T) Y) func([]T) []Y {
-// 	return func(items []T) []Y {
-// 		result := make([]Y, len(items))
-// 		for i, item := range items {
-// 			result[i] = fn(item)
-// 		}
+func mapTEachFunc[T any, Y any](fn func(T) Y) func([]T) []Y {
+	return func(items []T) []Y {
+		result := make([]Y, len(items))
+		for i, item := range items {
+			result[i] = fn(item)
+		}
 
-// 		return result
-// 	}
-// }
+		return result
+	}
+}
 
 func mapTEachErrFunc[T any, Y any](fn func(T) Y) func([]T, error) ([]Y, error) {
 	return func(items []T, err error) ([]Y, error) {
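
`mapTEachFunc` lifts a per-item mapping function into a slice mapping; it is used further down (in the maintenance repository) as `mapEachMaintenanceEntry`. A self-contained sketch of the same shape, with illustrative names:

```go
package example

import "strconv"

// mapEach has the same shape as mapTEachFunc: lift fn(T) Y to work over []T.
func mapEach[T any, Y any](fn func(T) Y) func([]T) []Y {
	return func(items []T) []Y {
		result := make([]Y, len(items))
		for i, item := range items {
			result[i] = fn(item)
		}
		return result
	}
}

func example() []string {
	toStrings := mapEach(strconv.Itoa) // func(int) string -> func([]int) []string
	return toStrings([]int{1, 2, 3})   // ["1", "2", "3"]
}
```
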
diff --git a/backend/internal/data/repo/repo_document_tokens.go b/backend/internal/data/repo/repo_document_tokens.go
deleted file mode 100644
index 018ea61..0000000
--- a/backend/internal/data/repo/repo_document_tokens.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package repo
-
-import (
-	"context"
-	"time"
-
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-)
-
-// DocumentTokensRepository is a repository for Document entity
-type DocumentTokensRepository struct {
-	db *ent.Client
-}
-
-type (
-	DocumentToken struct {
-		ID         uuid.UUID `json:"-"`
-		TokenHash  []byte    `json:"tokenHash"`
-		ExpiresAt  time.Time `json:"expiresAt"`
-		DocumentID uuid.UUID `json:"documentId"`
-	}
-
-	DocumentTokenCreate struct {
-		TokenHash  []byte    `json:"tokenHash"`
-		DocumentID uuid.UUID `json:"documentId"`
-		ExpiresAt  time.Time `json:"expiresAt"`
-	}
-)
-
-var (
-	mapDocumentTokenErr = mapTErrFunc(mapDocumentToken)
-)
-
-func mapDocumentToken(e *ent.DocumentToken) DocumentToken {
-	return DocumentToken{
-		ID:         e.ID,
-		TokenHash:  e.Token,
-		ExpiresAt:  e.ExpiresAt,
-		DocumentID: e.Edges.Document.ID,
-	}
-}
-
-func (r *DocumentTokensRepository) Create(ctx context.Context, data DocumentTokenCreate) (DocumentToken, error) {
-	result, err := r.db.DocumentToken.Create().
-		SetDocumentID(data.DocumentID).
-		SetToken(data.TokenHash).
-		SetExpiresAt(data.ExpiresAt).
-		Save(ctx)
-
-	if err != nil {
-		return DocumentToken{}, err
-	}
-
-	return mapDocumentTokenErr(r.db.DocumentToken.Query().
-		Where(documenttoken.ID(result.ID)).
-		WithDocument().
-		Only(ctx))
-}
-
-func (r *DocumentTokensRepository) PurgeExpiredTokens(ctx context.Context) (int, error) {
-	return r.db.DocumentToken.Delete().Where(documenttoken.ExpiresAtLT(time.Now())).Exec(ctx)
-}
-
-func (r *DocumentTokensRepository) Delete(ctx context.Context, id uuid.UUID) error {
-	return r.db.DocumentToken.DeleteOneID(id).Exec(ctx)
-}
diff --git a/backend/internal/data/repo/repo_document_tokens_test.go b/backend/internal/data/repo/repo_document_tokens_test.go
deleted file mode 100644
index 6646eca..0000000
--- a/backend/internal/data/repo/repo_document_tokens_test.go
+++ /dev/null
@@ -1,150 +0,0 @@
-package repo
-
-import (
-	"context"
-	"testing"
-	"time"
-
-	"github.com/google/uuid"
-	"github.com/hay-kot/homebox/backend/internal/data/ent"
-	"github.com/hay-kot/homebox/backend/internal/data/ent/documenttoken"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestDocumentTokensRepository_Create(t *testing.T) {
-	entities := useDocs(t, 1)
-	doc := entities[0]
-	expires := fk.Time()
-
-	type args struct {
-		ctx  context.Context
-		data DocumentTokenCreate
-	}
-	tests := []struct {
-		name    string
-		args    args
-		want    *ent.DocumentToken
-		wantErr bool
-	}{
-		{
-			name: "create document token",
-			args: args{
-				ctx: context.Background(),
-				data: DocumentTokenCreate{
-					DocumentID: doc.ID,
-					TokenHash:  []byte("token"),
-					ExpiresAt:  expires,
-				},
-			},
-			want: &ent.DocumentToken{
-				Edges: ent.DocumentTokenEdges{
-					Document: &ent.Document{
-						ID: doc.ID,
-					},
-				},
-				Token:     []byte("token"),
-				ExpiresAt: expires,
-			},
-			wantErr: false,
-		},
-		{
-			name: "create document token with empty token",
-			args: args{
-				ctx: context.Background(),
-				data: DocumentTokenCreate{
-					DocumentID: doc.ID,
-					TokenHash:  []byte(""),
-					ExpiresAt:  expires,
-				},
-			},
-			want:    nil,
-			wantErr: true,
-		},
-		{
-			name: "create document token with empty document id",
-			args: args{
-				ctx: context.Background(),
-				data: DocumentTokenCreate{
-					DocumentID: uuid.Nil,
-					TokenHash:  []byte("token"),
-					ExpiresAt:  expires,
-				},
-			},
-			want:    nil,
-			wantErr: true,
-		},
-	}
-
-	ids := make([]uuid.UUID, 0, len(tests))
-
-	t.Cleanup(func() {
-		for _, id := range ids {
-			_ = tRepos.DocTokens.Delete(context.Background(), id)
-		}
-	})
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-
-			got, err := tRepos.DocTokens.Create(tt.args.ctx, tt.args.data)
-			if (err != nil) != tt.wantErr {
-				t.Errorf("DocumentTokensRepository.Create() error = %v, wantErr %v", err, tt.wantErr)
-				return
-			}
-			if tt.wantErr {
-				return
-			}
-
-			assert.Equal(t, tt.want.Token, got.TokenHash)
-			assert.WithinDuration(t, tt.want.ExpiresAt, got.ExpiresAt, time.Duration(1)*time.Second)
-			assert.Equal(t, tt.want.Edges.Document.ID, got.DocumentID)
-		})
-
-	}
-}
-
-func useDocTokens(t *testing.T, num int) []DocumentToken {
-	entity := useDocs(t, 1)[0]
-
-	results := make([]DocumentToken, 0, num)
-
-	ids := make([]uuid.UUID, 0, num)
-	t.Cleanup(func() {
-		for _, id := range ids {
-			_ = tRepos.DocTokens.Delete(context.Background(), id)
-		}
-	})
-
-	for i := 0; i < num; i++ {
-		e, err := tRepos.DocTokens.Create(context.Background(), DocumentTokenCreate{
-			DocumentID: entity.ID,
-			TokenHash:  []byte(fk.Str(10)),
-			ExpiresAt:  fk.Time(),
-		})
-
-		assert.NoError(t, err)
-		results = append(results, e)
-		ids = append(ids, e.ID)
-	}
-
-	return results
-}
-
-func TestDocumentTokensRepository_PurgeExpiredTokens(t *testing.T) {
-	entities := useDocTokens(t, 2)
-
-	// set expired token
-	tRepos.DocTokens.db.DocumentToken.Update().
-		Where(documenttoken.ID(entities[0].ID)).
-		SetExpiresAt(time.Now().Add(-time.Hour)).
-		ExecX(context.Background())
-
-	count, err := tRepos.DocTokens.PurgeExpiredTokens(context.Background())
-	assert.NoError(t, err)
-	assert.Equal(t, 1, count)
-
-	all, err := tRepos.DocTokens.db.DocumentToken.Query().All(context.Background())
-	assert.NoError(t, err)
-	assert.Len(t, all, 1)
-	assert.Equal(t, entities[1].ID, all[0].ID)
-}
diff --git a/backend/internal/data/repo/repo_maintenance_entry.go b/backend/internal/data/repo/repo_maintenance_entry.go
new file mode 100644
index 0000000..175bd7e
--- /dev/null
+++ b/backend/internal/data/repo/repo_maintenance_entry.go
@@ -0,0 +1,136 @@
+package repo
+
+import (
+	"context"
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/data/ent"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
+)
+
+// MaintenanceEntryRepository is a repository for maintenance entries that are
+// associated with an item in the database. An entry represents a maintenance event
+// that has been performed on an item.
+type MaintenanceEntryRepository struct {
+	db *ent.Client
+}
+type (
+	MaintenanceEntryCreate struct {
+		Date        time.Time `json:"date"`
+		Name        string    `json:"name"`
+		Description string    `json:"description"`
+		Cost        float64   `json:"cost,string"`
+	}
+
+	MaintenanceEntry struct {
+		ID          uuid.UUID `json:"id"`
+		Date        time.Time `json:"date"`
+		Name        string    `json:"name"`
+		Description string    `json:"description"`
+		Cost        float64   `json:"cost,string"`
+	}
+
+	MaintenanceEntryUpdate struct {
+		Date        time.Time `json:"date"`
+		Name        string    `json:"name"`
+		Description string    `json:"description"`
+		Cost        float64   `json:"cost,string"`
+	}
+
+	MaintenanceLog struct {
+		ItemID      uuid.UUID          `json:"itemId"`
+		CostAverage float64            `json:"costAverage"`
+		CostTotal   float64            `json:"costTotal"`
+		Entries     []MaintenanceEntry `json:"entries"`
+	}
+)
+
+var (
+	mapMaintenanceEntryErr  = mapTErrFunc(mapMaintenanceEntry)
+	mapEachMaintenanceEntry = mapTEachFunc(mapMaintenanceEntry)
+)
+
+func mapMaintenanceEntry(entry *ent.MaintenanceEntry) MaintenanceEntry {
+	return MaintenanceEntry{
+		ID:          entry.ID,
+		Date:        entry.Date,
+		Name:        entry.Name,
+		Description: entry.Description,
+		Cost:        entry.Cost,
+	}
+}
+
+func (r *MaintenanceEntryRepository) Create(ctx context.Context, itemID uuid.UUID, input MaintenanceEntryCreate) (MaintenanceEntry, error) {
+	item, err := r.db.MaintenanceEntry.Create().
+		SetItemID(itemID).
+		SetDate(input.Date).
+		SetName(input.Name).
+		SetDescription(input.Description).
+		SetCost(input.Cost).
+		Save(ctx)
+
+	return mapMaintenanceEntryErr(item, err)
+}
+
+func (r *MaintenanceEntryRepository) Update(ctx context.Context, ID uuid.UUID, input MaintenanceEntryUpdate) (MaintenanceEntry, error) {
+	item, err := r.db.MaintenanceEntry.UpdateOneID(ID).
+		SetDate(input.Date).
+		SetName(input.Name).
+		SetDescription(input.Description).
+		SetCost(input.Cost).
+		Save(ctx)
+
+	return mapMaintenanceEntryErr(item, err)
+}
+
+func (r *MaintenanceEntryRepository) GetLog(ctx context.Context, itemID uuid.UUID) (MaintenanceLog, error) {
+	log := MaintenanceLog{
+		ItemID: itemID,
+	}
+
+	entries, err := r.db.MaintenanceEntry.Query().
+		Where(maintenanceentry.ItemID(itemID)).
+		Order(ent.Desc(maintenanceentry.FieldDate)).
+		All(ctx)
+
+	if err != nil {
+		return MaintenanceLog{}, err
+	}
+
+	log.Entries = mapEachMaintenanceEntry(entries)
+
+	var maybeTotal *float64
+	var maybeAverage *float64
+
+	q := `
+SELECT
+  SUM(cost_total) AS total_of_totals,
+  AVG(cost_total) AS avg_of_averages
+FROM
+  (
+    SELECT
+      strftime('%m-%Y', date) AS my,
+      SUM(cost) AS cost_total
+    FROM
+      maintenance_entries
+    WHERE
+      item_id = ?
+    GROUP BY
+      my
+  )`
+
+	row := r.db.Sql().QueryRowContext(ctx, q, itemID)
+	err = row.Scan(&maybeTotal, &maybeAverage)
+	if err != nil {
+		return MaintenanceLog{}, err
+	}
+
+	log.CostAverage = orDefault(maybeAverage, 0)
+	log.CostTotal = orDefault(maybeTotal, 0)
+	return log, nil
+}
+
+func (r *MaintenanceEntryRepository) Delete(ctx context.Context, ID uuid.UUID) error {
+	return r.db.MaintenanceEntry.DeleteOneID(ID).Exec(ctx)
+}
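
A rough usage sketch for the new repository (the sample entry values are made up; the types and methods are the ones defined above, and the `MaintEntry` field is wired into `AllRepos` further down in this diff). Worth noting: the raw SQL groups costs by month before averaging, so `CostAverage` is an average of monthly totals rather than of individual entries.

```go
package example

import (
	"context"
	"time"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

// logMaintenance records one entry for an item and reads back the aggregated log.
func logMaintenance(ctx context.Context, repos *repo.AllRepos, itemID uuid.UUID) (repo.MaintenanceLog, error) {
	_, err := repos.MaintEntry.Create(ctx, itemID, repo.MaintenanceEntryCreate{
		Date:        time.Now(),
		Name:        "Replace filter",
		Description: "Swapped the intake filter",
		Cost:        12.50,
	})
	if err != nil {
		return repo.MaintenanceLog{}, err
	}

	return repos.MaintEntry.GetLog(ctx, itemID)
}
```
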
diff --git a/backend/internal/data/repo/repo_maintenance_entry_test.go b/backend/internal/data/repo/repo_maintenance_entry_test.go
new file mode 100644
index 0000000..8babefc
--- /dev/null
+++ b/backend/internal/data/repo/repo_maintenance_entry_test.go
@@ -0,0 +1,65 @@
+package repo
+
+import (
+	"context"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestMaintenanceEntryRepository_GetLog(t *testing.T) {
+	item := useItems(t, 1)[0]
+
+	// Create 10 maintenance entries for the item
+	created := make([]MaintenanceEntryCreate, 10)
+
+	lastMonth := time.Now().AddDate(0, -1, 0)
+	thisMonth := time.Now()
+
+	for i := 0; i < 10; i++ {
+		dt := lastMonth
+		if i%2 == 0 {
+			dt = thisMonth
+		}
+
+		created[i] = MaintenanceEntryCreate{
+			Date:        dt,
+			Name:        "Maintenance",
+			Description: "Maintenance description",
+			Cost:        10,
+		}
+	}
+
+	for _, entry := range created {
+		_, err := tRepos.MaintEntry.Create(context.Background(), item.ID, entry)
+		if err != nil {
+			t.Fatalf("failed to create maintenance entry: %v", err)
+		}
+	}
+
+	// Get the log for the item
+	log, err := tRepos.MaintEntry.GetLog(context.Background(), item.ID)
+
+	if err != nil {
+		t.Fatalf("failed to get maintenance log: %v", err)
+	}
+
+	assert.Equal(t, item.ID, log.ItemID)
+	assert.Equal(t, 10, len(log.Entries))
+
+	// Calculate the average cost
+	var total float64
+
+	for _, entry := range log.Entries {
+		total += entry.Cost
+	}
+
+	assert.Equal(t, total, log.CostTotal, "total cost should be equal to the sum of all entries")
+	assert.Equal(t, total/2, log.CostAverage, "average cost should be the average of the two months")
+
+	for _, entry := range log.Entries {
+		err := tRepos.MaintEntry.Delete(context.Background(), entry.ID)
+		assert.NoError(t, err)
+	}
+}
diff --git a/backend/internal/data/repo/repos_all.go b/backend/internal/data/repo/repos_all.go
index e726e88..40748cb 100644
--- a/backend/internal/data/repo/repos_all.go
+++ b/backend/internal/data/repo/repos_all.go
@@ -11,8 +11,8 @@ type AllRepos struct {
 	Labels      *LabelRepository
 	Items       *ItemsRepository
 	Docs        *DocumentRepository
-	DocTokens   *DocumentTokensRepository
 	Attachments *AttachmentRepo
+	MaintEntry  *MaintenanceEntryRepository
 }
 
 func New(db *ent.Client, root string) *AllRepos {
@@ -24,7 +24,7 @@ func New(db *ent.Client, root string) *AllRepos {
 		Labels:      &LabelRepository{db},
 		Items:       &ItemsRepository{db},
 		Docs:        &DocumentRepository{db, root},
-		DocTokens:   &DocumentTokensRepository{db},
 		Attachments: &AttachmentRepo{db},
+		MaintEntry:  &MaintenanceEntryRepository{db},
 	}
 }
diff --git a/backend/pkgs/hasher/password.go b/backend/pkgs/hasher/password.go
index 64e88b2..1be8251 100644
--- a/backend/pkgs/hasher/password.go
+++ b/backend/pkgs/hasher/password.go
@@ -13,7 +13,7 @@ func init() {
 	disableHas := os.Getenv("UNSAFE_DISABLE_PASSWORD_PROJECTION") == "yes_i_am_sure"
 
 	if disableHas {
-		fmt.Println("WARNING: Password projection is disabled. This is unsafe in production.")
+		fmt.Println("WARNING: Password protection is disabled. This is unsafe in production.")
 		enabled = false
 	}
 }
diff --git a/frontend/components/Base/Button.vue b/frontend/components/Base/Button.vue
index 1fcb2f2..915ef51 100644
--- a/frontend/components/Base/Button.vue
+++ b/frontend/components/Base/Button.vue
@@ -9,6 +9,7 @@
       'btn-sm': size === 'sm',
       'btn-lg': size === 'lg',
     }"
+    :style="upper ? '' : 'text-transform: none'"
   >
     <label v-if="$slots.icon" class="swap swap-rotate mr-2" :class="{ 'swap-active': isHover }">
       <slot name="icon" />
diff --git a/frontend/components/Base/Card.vue b/frontend/components/Base/Card.vue
index 0392784..464066f 100644
--- a/frontend/components/Base/Card.vue
+++ b/frontend/components/Base/Card.vue
@@ -1,6 +1,6 @@
 <template>
   <div class="card bg-base-100 shadow-xl sm:rounded-lg">
-    <div class="px-4 py-5 sm:px-6">
+    <div v-if="$slots.title" class="px-4 py-5 sm:px-6">
       <h3 class="text-lg font-medium leading-6">
         <slot name="title"></slot>
       </h3>
diff --git a/frontend/components/Form/DatePicker.vue b/frontend/components/Form/DatePicker.vue
index d5f41a2..d04a28b 100644
--- a/frontend/components/Form/DatePicker.vue
+++ b/frontend/components/Form/DatePicker.vue
@@ -13,6 +13,7 @@
             <button
               v-if="day.number != ''"
               :key="day.number"
+              type="button"
               class="text-center btn-xs btn btn-outline"
               @click="select($event, day.date)"
             >
@@ -22,11 +23,11 @@
           </template>
         </div>
         <div class="flex justify-between mt-1 items-center">
-          <button class="btn btn-xs" @click="prevMonth">
+          <button type="button" class="btn btn-xs" @click="prevMonth">
             <Icon class="h-5 w-5" name="mdi-arrow-left"></Icon>
           </button>
           <p class="text-center">{{ month }} {{ year }}</p>
-          <button class="btn btn-xs" @click="nextMonth">
+          <button type="button" class="btn btn-xs" @click="nextMonth">
             <Icon class="h-5 w-5" name="mdi-arrow-right"></Icon>
           </button>
         </div>
diff --git a/frontend/components/global/DateTime.vue b/frontend/components/global/DateTime.vue
index 940ee3e..4f349e3 100644
--- a/frontend/components/global/DateTime.vue
+++ b/frontend/components/global/DateTime.vue
@@ -3,12 +3,37 @@
 </template>
 
 <script setup lang="ts">
-  enum DateTimeFormat {
-    RELATIVE = "relative",
-    LONG = "long",
-    SHORT = "short",
+  type DateTimeFormat = "relative" | "long" | "short" | "human";
+
+  function ordinalIndicator(num: number) {
+    if (num > 3 && num < 21) return "th";
+    switch (num % 10) {
+      case 1:
+        return "st";
+      case 2:
+        return "nd";
+      case 3:
+        return "rd";
+      default:
+        return "th";
+    }
   }
 
+  const months = [
+    "January",
+    "February",
+    "March",
+    "April",
+    "May",
+    "June",
+    "July",
+    "August",
+    "September",
+    "October",
+    "November",
+    "December",
+  ];
+
   const value = computed(() => {
     if (!props.date) {
       return "";
@@ -24,12 +49,15 @@
     }
 
     switch (props.format) {
-      case DateTimeFormat.RELATIVE:
+      case "relative":
         return useTimeAgo(dt).value + useDateFormat(dt, " (MM-DD-YYYY)").value;
-      case DateTimeFormat.LONG:
+      case "long":
         return useDateFormat(dt, "MM-DD-YYYY (dddd)").value;
-      case DateTimeFormat.SHORT:
+      case "short":
         return useDateFormat(dt, "MM-DD-YYYY").value;
+      case "human":
+        // January 1st, 2021
+        return `${months[dt.getMonth()]} ${dt.getDate()}${ordinalIndicator(dt.getDate())}, ${dt.getFullYear()}`;
       default:
         return "";
     }
diff --git a/frontend/components/global/Markdown.vue b/frontend/components/global/Markdown.vue
index 9fbb197..0fb8a13 100644
--- a/frontend/components/global/Markdown.vue
+++ b/frontend/components/global/Markdown.vue
@@ -3,10 +3,12 @@
   import DOMPurify from "dompurify";
 
   type Props = {
-    source: string;
+    source: string | null | undefined;
   };
 
-  const props = withDefaults(defineProps<Props>(), {});
+  const props = withDefaults(defineProps<Props>(), {
+    source: null,
+  });
 
   const md = new MarkdownIt({
     html: true,
@@ -15,7 +17,7 @@
   });
 
   const raw = computed(() => {
-    const html = md.render(props.source);
+    const html = md.render(props.source || "");
     return DOMPurify.sanitize(html);
   });
 </script>
diff --git a/frontend/lib/api/__test__/user/items.test.ts b/frontend/lib/api/__test__/user/items.test.ts
index 7837e50..1a5f94e 100644
--- a/frontend/lib/api/__test__/user/items.test.ts
+++ b/frontend/lib/api/__test__/user/items.test.ts
@@ -33,6 +33,7 @@ describe("user should be able to create an item and add an attachment", () => {
     const [location, cleanup] = await useLocation(api);
 
     const { response, data: item } = await api.items.create({
+      parentId: null,
       name: "test-item",
       labelIds: [],
       description: "test-description",
@@ -43,7 +44,7 @@ describe("user should be able to create an item and add an attachment", () => {
     // Add attachment
     {
       const testFile = new Blob(["test"], { type: "text/plain" });
-      const { response } = await api.items.addAttachment(item.id, testFile, "test.txt", AttachmentTypes.Attachment);
+      const { response } = await api.items.attachments.add(item.id, testFile, "test.txt", AttachmentTypes.Attachment);
       expect(response.status).toBe(201);
     }
 
@@ -54,7 +55,7 @@ describe("user should be able to create an item and add an attachment", () => {
     expect(data.attachments).toHaveLength(1);
     expect(data.attachments[0].document.title).toBe("test.txt");
 
-    const resp = await api.items.deleteAttachment(data.id, data.attachments[0].id);
+    const resp = await api.items.attachments.delete(data.id, data.attachments[0].id);
     expect(resp.response.status).toBe(204);
 
     api.items.delete(item.id);
@@ -66,6 +67,7 @@ describe("user should be able to create an item and add an attachment", () => {
     const [location, cleanup] = await useLocation(api);
 
     const { response, data: item } = await api.items.create({
+      parentId: null,
       name: faker.vehicle.model(),
       labelIds: [],
       description: faker.lorem.paragraph(1),
@@ -82,6 +84,7 @@ describe("user should be able to create an item and add an attachment", () => {
 
     // Add fields
     const itemUpdate = {
+      parentId: null,
       ...item,
       locationId: item.location.id,
       labelIds: item.labels.map(l => l.id),
@@ -113,4 +116,41 @@ describe("user should be able to create an item and add an attachment", () => {
 
     cleanup();
   });
+
+  test("users should be able to create and few maintenance logs for an item", async () => {
+    const api = await sharedUserClient();
+    const [location, cleanup] = await useLocation(api);
+    const { response, data: item } = await api.items.create({
+      parentId: null,
+      name: faker.vehicle.model(),
+      labelIds: [],
+      description: faker.lorem.paragraph(1),
+      locationId: location.id,
+    });
+    expect(response.status).toBe(201);
+
+    const maintenanceEntries = [];
+    for (let i = 0; i < 5; i++) {
+      const { response, data } = await api.items.maintenance.create(item.id, {
+        name: faker.vehicle.model(),
+        description: faker.lorem.paragraph(1),
+        date: faker.date.past(1),
+        cost: faker.datatype.number(100).toString(),
+      });
+
+      expect(response.status).toBe(201);
+      maintenanceEntries.push(data);
+    }
+
+    // Log
+    {
+      const { response, data } = await api.items.maintenance.getLog(item.id);
+      expect(response.status).toBe(200);
+      expect(data.entries).toHaveLength(maintenanceEntries.length);
+      expect(data.costAverage).toBeGreaterThan(0);
+      expect(data.costTotal).toBeGreaterThan(0);
+    }
+
+    cleanup();
+  });
 });
diff --git a/frontend/lib/api/classes/items.ts b/frontend/lib/api/classes/items.ts
index f4fb38d..8522852 100644
--- a/frontend/lib/api/classes/items.ts
+++ b/frontend/lib/api/classes/items.ts
@@ -1,7 +1,18 @@
 import { BaseAPI, route } from "../base";
 import { parseDate } from "../base/base-api";
-import { ItemAttachmentUpdate, ItemCreate, ItemOut, ItemSummary, ItemUpdate } from "../types/data-contracts";
+import {
+  ItemAttachmentUpdate,
+  ItemCreate,
+  ItemOut,
+  ItemSummary,
+  ItemUpdate,
+  MaintenanceEntry,
+  MaintenanceEntryCreate,
+  MaintenanceEntryUpdate,
+  MaintenanceLog,
+} from "../types/data-contracts";
 import { AttachmentTypes, PaginationResult } from "../types/non-generated";
+import { Requests } from "~~/lib/requests";
 
 export type ItemsQuery = {
   includeArchived?: boolean;
@@ -12,7 +23,65 @@ export type ItemsQuery = {
   q?: string;
 };
 
+export class AttachmentsAPI extends BaseAPI {
+  add(id: string, file: File | Blob, filename: string, type: AttachmentTypes) {
+    const formData = new FormData();
+    formData.append("file", file);
+    formData.append("type", type);
+    formData.append("name", filename);
+
+    return this.http.post<FormData, ItemOut>({
+      url: route(`/items/${id}/attachments`),
+      data: formData,
+    });
+  }
+
+  delete(id: string, attachmentId: string) {
+    return this.http.delete<void>({ url: route(`/items/${id}/attachments/${attachmentId}`) });
+  }
+
+  update(id: string, attachmentId: string, data: ItemAttachmentUpdate) {
+    return this.http.put<ItemAttachmentUpdate, ItemOut>({
+      url: route(`/items/${id}/attachments/${attachmentId}`),
+      body: data,
+    });
+  }
+}
+
+export class MaintenanceAPI extends BaseAPI {
+  getLog(itemId: string) {
+    return this.http.get<MaintenanceLog>({ url: route(`/items/${itemId}/maintenance`) });
+  }
+
+  create(itemId: string, data: MaintenanceEntryCreate) {
+    return this.http.post<MaintenanceEntryCreate, MaintenanceEntry>({
+      url: route(`/items/${itemId}/maintenance`),
+      body: data,
+    });
+  }
+
+  delete(itemId: string, entryId: string) {
+    return this.http.delete<void>({ url: route(`/items/${itemId}/maintenance/${entryId}`) });
+  }
+
+  update(itemId: string, entryId: string, data: MaintenanceEntryUpdate) {
+    return this.http.put<MaintenanceEntryUpdate, MaintenanceEntry>({
+      url: route(`/items/${itemId}/maintenance/${entryId}`),
+      body: data,
+    });
+  }
+}
+
 export class ItemsApi extends BaseAPI {
+  attachments: AttachmentsAPI;
+  maintenance: MaintenanceAPI;
+
+  constructor(http: Requests, token: string) {
+    super(http, token);
+    this.attachments = new AttachmentsAPI(http);
+    this.maintenance = new MaintenanceAPI(http);
+  }
+
   getAll(q: ItemsQuery = {}) {
     return this.http.get<PaginationResult<ItemSummary>>({ url: route("/items", q) });
   }
@@ -59,27 +128,4 @@ export class ItemsApi extends BaseAPI {
       data: formData,
     });
   }
-
-  addAttachment(id: string, file: File | Blob, filename: string, type: AttachmentTypes) {
-    const formData = new FormData();
-    formData.append("file", file);
-    formData.append("type", type);
-    formData.append("name", filename);
-
-    return this.http.post<FormData, ItemOut>({
-      url: route(`/items/${id}/attachments`),
-      data: formData,
-    });
-  }
-
-  async deleteAttachment(id: string, attachmentId: string) {
-    return await this.http.delete<void>({ url: route(`/items/${id}/attachments/${attachmentId}`) });
-  }
-
-  async updateAttachment(id: string, attachmentId: string, data: ItemAttachmentUpdate) {
-    return await this.http.put<ItemAttachmentUpdate, ItemOut>({
-      url: route(`/items/${id}/attachments/${attachmentId}`),
-      body: data,
-    });
-  }
 }
diff --git a/frontend/lib/api/types/data-contracts.ts b/frontend/lib/api/types/data-contracts.ts
index 09f10e7..e313175 100644
--- a/frontend/lib/api/types/data-contracts.ts
+++ b/frontend/lib/api/types/data-contracts.ts
@@ -54,7 +54,6 @@ export interface ItemAttachmentUpdate {
 export interface ItemCreate {
   description: string;
   labelIds: string[];
-
   /** Edges */
   locationId: string;
   name: string;
@@ -73,8 +72,7 @@ export interface ItemField {
 
 export interface ItemOut {
   archived: boolean;
-
-  /** @example 0 */
+  /** @example "0" */
   assetId: string;
   attachments: ItemAttachment[];
   children: ItemSummary[];
@@ -84,33 +82,26 @@ export interface ItemOut {
   id: string;
   insured: boolean;
   labels: LabelSummary[];
-
   /** Warranty */
   lifetimeWarranty: boolean;
-
   /** Edges */
   location: LocationSummary | null;
   manufacturer: string;
   modelNumber: string;
   name: string;
-
   /** Extras */
   notes: string;
   parent: ItemSummary | null;
   purchaseFrom: string;
-
-  /** @example 0 */
+  /** @example "0" */
   purchasePrice: string;
-
   /** Purchase */
   purchaseTime: Date;
   quantity: number;
   serialNumber: string;
   soldNotes: string;
-
-  /** @example 0 */
+  /** @example "0" */
   soldPrice: string;
-
   /** Sold */
   soldTime: Date;
   soldTo: string;
@@ -126,7 +117,6 @@ export interface ItemSummary {
   id: string;
   insured: boolean;
   labels: LabelSummary[];
-
   /** Edges */
   location: LocationSummary | null;
   name: string;
@@ -142,35 +132,27 @@ export interface ItemUpdate {
   id: string;
   insured: boolean;
   labelIds: string[];
-
   /** Warranty */
   lifetimeWarranty: boolean;
-
   /** Edges */
   locationId: string;
   manufacturer: string;
   modelNumber: string;
   name: string;
-
   /** Extras */
   notes: string;
   parentId: string | null;
   purchaseFrom: string;
-
-  /** @example 0 */
+  /** @example "0" */
   purchasePrice: string;
-
   /** Purchase */
   purchaseTime: Date;
   quantity: number;
-
   /** Identifications */
   serialNumber: string;
   soldNotes: string;
-
-  /** @example 0 */
+  /** @example "0" */
   soldPrice: string;
-
   /** Sold */
   soldTime: Date;
   soldTo: string;
@@ -241,6 +223,38 @@ export interface LocationUpdate {
   parentId: string | null;
 }
 
+export interface MaintenanceEntry {
+  /** @example "0" */
+  cost: string;
+  date: Date;
+  description: string;
+  id: string;
+  name: string;
+}
+
+export interface MaintenanceEntryCreate {
+  /** @example "0" */
+  cost: string;
+  date: Date;
+  description: string;
+  name: string;
+}
+
+export interface MaintenanceEntryUpdate {
+  /** @example "0" */
+  cost: string;
+  date: Date;
+  description: string;
+  name: string;
+}
+
+export interface MaintenanceLog {
+  costAverage: number;
+  costTotal: number;
+  entries: MaintenanceEntry[];
+  itemId: string;
+}
+
 export interface PaginationResultRepoItemSummary {
   items: ItemSummary[];
   page: number;
@@ -278,7 +292,7 @@ export interface ValueOverTime {
 }
 
 export interface ValueOverTimeEntry {
-  date: string;
+  date: Date;
   name: string;
   value: number;
 }
diff --git a/frontend/nuxt.config.ts b/frontend/nuxt.config.ts
index 6019d26..6f4638b 100644
--- a/frontend/nuxt.config.ts
+++ b/frontend/nuxt.config.ts
@@ -1,20 +1,16 @@
-import { defineNuxtConfig } from "nuxt";
+import { defineNuxtConfig } from "nuxt/config";
 
 // https://v3.nuxtjs.org/api/configuration/nuxt.config
 export default defineNuxtConfig({
-  target: "static",
   ssr: false,
   modules: ["@nuxtjs/tailwindcss", "@pinia/nuxt", "@vueuse/nuxt"],
-  meta: {
-    title: "Homebox",
-    link: [{ rel: "icon", type: "image/x-icon", href: "/favicon.svg" }],
-  },
-  vite: {
-    server: {
-      proxy: {
-        "/api": "http://localhost:7745",
-      },
+  nitro: {
+    devProxy: {
+      "/api": {
+        target: "http://localhost:7745/api",
+        changeOrigin: true,
+      }
     },
-    plugins: [],
   },
+  plugins: [],
 });
diff --git a/frontend/package.json b/frontend/package.json
index a06db47..973e383 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -21,7 +21,7 @@
     "eslint-plugin-prettier": "^4.2.1",
     "eslint-plugin-vue": "^9.4.0",
     "isomorphic-fetch": "^3.0.0",
-    "nuxt": "3.0.0-rc.11",
+    "nuxt": "3.0.0",
     "prettier": "^2.7.1",
     "typescript": "^4.8.3",
     "vite-plugin-eslint": "^1.8.1",
@@ -29,7 +29,7 @@
   },
   "dependencies": {
     "@iconify/vue": "^3.2.1",
-    "@nuxtjs/tailwindcss": "^5.3.2",
+    "@nuxtjs/tailwindcss": "^6.1.3",
     "@pinia/nuxt": "^0.4.1",
     "@tailwindcss/aspect-ratio": "^0.4.0",
     "@tailwindcss/forms": "^0.5.2",
diff --git a/frontend/pages/home.vue b/frontend/pages/home.vue
index 92edee3..e8a52e7 100644
--- a/frontend/pages/home.vue
+++ b/frontend/pages/home.vue
@@ -46,7 +46,7 @@
   const importDialog = ref(false);
   const importCsv = ref(null);
   const importLoading = ref(false);
-  const importRef = ref<HTMLInputElement>(null);
+  const importRef = ref<HTMLInputElement>();
   whenever(
     () => !importDialog.value,
     () => {
@@ -120,7 +120,7 @@
       <section>
         <BaseCard>
           <template #title> Welcome Back, {{ auth.self ? auth.self.name : "Username" }} </template>
-          <template #subtitle> {{ auth.self.isSuperuser ? "Admin" : "User" }} </template>
+          <!-- <template #subtitle> {{ auth.self.isSuperuser ? "Admin" : "User" }} </template> -->
           <template #title-actions>
             <div class="flex justify-end gap-2">
               <div class="tooltip" data-tip="Import CSV File">
diff --git a/frontend/pages/item/[id]/edit.vue b/frontend/pages/item/[id]/edit.vue
index 0ed53d3..98b9f2a 100644
--- a/frontend/pages/item/[id]/edit.vue
+++ b/frontend/pages/item/[id]/edit.vue
@@ -214,7 +214,7 @@
       return;
     }
 
-    const { data, error } = await api.items.addAttachment(itemId.value, files[0], files[0].name, type);
+    const { data, error } = await api.items.attachments.add(itemId.value, files[0], files[0].name, type);
 
     if (error) {
       toast.error("Failed to upload attachment");
@@ -235,7 +235,7 @@
       return;
     }
 
-    const { error } = await api.items.deleteAttachment(itemId.value, attachmentId);
+    const { error } = await api.items.attachments.delete(itemId.value, attachmentId);
 
     if (error) {
       toast.error("Failed to delete attachment");
@@ -273,7 +273,7 @@
 
   async function updateAttachment() {
     editState.loading = true;
-    const { error, data } = await api.items.updateAttachment(itemId.value, editState.id, {
+    const { error, data } = await api.items.attachments.update(itemId.value, editState.id, {
       title: editState.title,
       type: editState.type,
     });
diff --git a/frontend/pages/item/[id]/index.vue b/frontend/pages/item/[id]/index.vue
index ecafe01..16d5a74 100644
--- a/frontend/pages/item/[id]/index.vue
+++ b/frontend/pages/item/[id]/index.vue
@@ -13,6 +13,10 @@
   const itemId = computed<string>(() => route.params.id as string);
   const preferences = useViewPreferences();
 
+  const hasNested = computed<boolean>(() => {
+    return route.fullPath.split("/").at(-1) !== itemId.value;
+  });
+
   const { data: item, refresh } = useAsyncData(itemId.value, async () => {
     const { data, error } = await api.items.get(itemId.value);
     if (error) {
@@ -219,7 +223,7 @@
     } else {
       details.push({
         name: "Warranty Expires",
-        text: item.value?.warrantyExpires,
+        text: item.value?.warrantyExpires || "",
         type: "date",
       });
     }
@@ -253,7 +257,7 @@
       },
       {
         name: "Purchase Date",
-        text: item.value.purchaseTime,
+        text: item.value?.purchaseTime || "",
         type: "date",
       },
     ];
@@ -309,12 +313,12 @@
   });
 
   function openDialog(img: Photo) {
-    refDialog.value.showModal();
+    refDialog.value?.showModal();
     dialoged.src = img.src;
   }
 
   function closeDialog() {
-    refDialog.value.close();
+    refDialog.value?.close();
   }
 
   const refDialogBody = ref<HTMLDivElement>();
@@ -340,10 +344,7 @@
       </div>
     </dialog>
     <section class="px-3">
-      <div class="flex justify-between items-center">
-        <div class="form-control"></div>
-      </div>
-      <div class="grid grid-cols-1 gap-3">
+      <div class="space-y-3">
         <BaseCard>
           <template #title>
             <BaseSectionHeader>
@@ -374,10 +375,16 @@
           </template>
           <template #title-actions>
             <div class="modal-action mt-0">
-              <label class="label cursor-pointer mr-auto">
+              <label v-if="!hasNested" class="label cursor-pointer mr-auto">
                 <input v-model="preferences.showEmpty" type="checkbox" class="toggle toggle-primary" />
                 <span class="label-text ml-4"> Show Empty </span>
               </label>
+              <BaseButton v-else class="mr-auto" size="sm" @click="$router.go(-1)">
+                <template #icon>
+                  <Icon name="mdi-arrow-left" class="h-5 w-5" />
+                </template>
+                Back
+              </BaseButton>
               <BaseButton size="sm" :to="`/item/${itemId}/edit`">
                 <template #icon>
                   <Icon name="mdi-pencil" />
@@ -390,75 +397,84 @@
                 </template>
                 Delete
               </BaseButton>
+              <BaseButton size="sm" :to="`/item/${itemId}/log`">
+                <template #icon>
+                  <Icon name="mdi-post" />
+                </template>
+                Log
+              </BaseButton>
             </div>
           </template>
 
-          <DetailsSection :details="itemDetails" />
+          <DetailsSection v-if="!hasNested" :details="itemDetails" />
         </BaseCard>
 
-        <BaseCard v-if="photos && photos.length > 0">
-          <template #title> Photos </template>
-          <div
-            class="container border-t border-gray-300 p-4 flex flex-wrap gap-2 mx-auto max-h-[500px] overflow-y-scroll scroll-bg"
-          >
-            <button v-for="(img, i) in photos" :key="i" @click="openDialog(img)">
-              <img class="rounded max-h-[200px]" :src="img.src" />
-            </button>
-          </div>
-        </BaseCard>
+        <NuxtPage :item="item" :page-key="itemId" />
+        <div v-if="!hasNested">
+          <BaseCard v-if="photos && photos.length > 0">
+            <template #title> Photos </template>
+            <div
+              class="container border-t border-gray-300 p-4 flex flex-wrap gap-2 mx-auto max-h-[500px] overflow-y-scroll scroll-bg"
+            >
+              <button v-for="(img, i) in photos" :key="i" @click="openDialog(img)">
+                <img class="rounded max-h-[200px]" :src="img.src" />
+              </button>
+            </div>
+          </BaseCard>
 
-        <BaseCard v-if="showAttachments">
-          <template #title> Attachments </template>
-          <DetailsSection :details="attachmentDetails">
-            <template #manuals>
-              <ItemAttachmentsList
-                v-if="attachments.manuals.length > 0"
-                :attachments="attachments.manuals"
-                :item-id="item.id"
-              />
-            </template>
-            <template #attachments>
-              <ItemAttachmentsList
-                v-if="attachments.attachments.length > 0"
-                :attachments="attachments.attachments"
-                :item-id="item.id"
-              />
-            </template>
-            <template #warranty>
-              <ItemAttachmentsList
-                v-if="attachments.warranty.length > 0"
-                :attachments="attachments.warranty"
-                :item-id="item.id"
-              />
-            </template>
-            <template #receipts>
-              <ItemAttachmentsList
-                v-if="attachments.receipts.length > 0"
-                :attachments="attachments.receipts"
-                :item-id="item.id"
-              />
-            </template>
-          </DetailsSection>
-        </BaseCard>
+          <BaseCard v-if="showAttachments">
+            <template #title> Attachments </template>
+            <DetailsSection :details="attachmentDetails">
+              <template #manuals>
+                <ItemAttachmentsList
+                  v-if="attachments.manuals.length > 0"
+                  :attachments="attachments.manuals"
+                  :item-id="item.id"
+                />
+              </template>
+              <template #attachments>
+                <ItemAttachmentsList
+                  v-if="attachments.attachments.length > 0"
+                  :attachments="attachments.attachments"
+                  :item-id="item.id"
+                />
+              </template>
+              <template #warranty>
+                <ItemAttachmentsList
+                  v-if="attachments.warranty.length > 0"
+                  :attachments="attachments.warranty"
+                  :item-id="item.id"
+                />
+              </template>
+              <template #receipts>
+                <ItemAttachmentsList
+                  v-if="attachments.receipts.length > 0"
+                  :attachments="attachments.receipts"
+                  :item-id="item.id"
+                />
+              </template>
+            </DetailsSection>
+          </BaseCard>
 
-        <BaseCard v-if="showPurchase">
-          <template #title> Purchase Details </template>
-          <DetailsSection :details="purchaseDetails" />
-        </BaseCard>
+          <BaseCard v-if="showPurchase">
+            <template #title> Purchase Details </template>
+            <DetailsSection :details="purchaseDetails" />
+          </BaseCard>
 
-        <BaseCard v-if="showWarranty">
-          <template #title> Warranty Details </template>
-          <DetailsSection :details="warrantyDetails" />
-        </BaseCard>
+          <BaseCard v-if="showWarranty">
+            <template #title> Warranty Details </template>
+            <DetailsSection :details="warrantyDetails" />
+          </BaseCard>
 
-        <BaseCard v-if="showSold">
-          <template #title> Sold Details </template>
-          <DetailsSection :details="soldDetails" />
-        </BaseCard>
+          <BaseCard v-if="showSold">
+            <template #title> Sold Details </template>
+            <DetailsSection :details="soldDetails" />
+          </BaseCard>
+        </div>
       </div>
     </section>
 
-    <section class="my-6 px-3">
+    <section v-if="!hasNested" class="my-6 px-3">
       <BaseSectionHeader v-if="item && item.children && item.children.length > 0"> Child Items </BaseSectionHeader>
       <div class="grid grid-cols-1 sm:grid-cols-2 gap-4">
         <ItemCard v-for="child in item.children" :key="child.id" :item="child" />
diff --git a/frontend/pages/item/[id]/index/log.vue b/frontend/pages/item/[id]/index/log.vue
new file mode 100644
index 0000000..ab701db
--- /dev/null
+++ b/frontend/pages/item/[id]/index/log.vue
@@ -0,0 +1,173 @@
+<script setup lang="ts">
+  import DatePicker from "~~/components/Form/DatePicker.vue";
+  import { ItemOut } from "~~/lib/api/types/data-contracts";
+
+  const props = defineProps<{
+    item: ItemOut;
+  }>();
+
+  const api = useUserApi();
+  const toast = useNotifier();
+
+  const { data: log, refresh: refreshLog } = useAsyncData(async () => {
+    const { data } = await api.items.maintenance.getLog(props.item.id);
+    return data;
+  });
+
+  const stats = computed(() => {
+    if (!log.value) return [];
+
+    return [
+      {
+        id: "total",
+        title: "Total Cost",
+        subtitle: "Sum over all entries",
+        value: fmtCurrency(log.value.costTotal),
+      },
+      {
+        id: "average",
+        title: "Monthly Average",
+        subtitle: "Average over all entries",
+        value: fmtCurrency(log.value.costAverage),
+      },
+    ];
+  });
+
+  const entry = reactive({
+    modal: false,
+    name: "",
+    date: new Date(),
+    description: "",
+    cost: "",
+  });
+
+  function newEntry() {
+    entry.modal = true;
+  }
+
+  async function createEntry() {
+    const { error } = await api.items.maintenance.create(props.item.id, {
+      name: entry.name,
+      date: entry.date,
+      description: entry.description,
+      cost: entry.cost,
+    });
+
+    if (error) {
+      toast.error("Failed to create entry");
+      return;
+    }
+
+    entry.modal = false;
+
+    refreshLog();
+  }
+
+  async function deleteEntry(id: string) {
+    const { error } = await api.items.maintenance.delete(props.item.id, id);
+
+    if (error) {
+      toast.error("Failed to delete entry");
+      return;
+    }
+
+    refreshLog();
+  }
+</script>
+
+<template>
+  <div v-if="log">
+    <BaseModal v-model="entry.modal">
+      <template #title> Create Entry </template>
+      <form @submit.prevent="createEntry">
+        <FormTextField v-model="entry.name" autofocus label="Entry Name" />
+        <DatePicker v-model="entry.date" label="Date" />
+        <FormTextArea v-model="entry.description" label="Notes" />
+        <FormTextField v-model="entry.cost" autofocus label="Cost" />
+        <div class="py-2 flex justify-end">
+          <BaseButton type="submit" class="ml-2">
+            <template #icon>
+              <Icon name="mdi-post" />
+            </template>
+            Create
+          </BaseButton>
+        </div>
+      </form>
+    </BaseModal>
+
+    <div class="flex">
+      <BaseButton class="ml-auto" size="sm" @click="newEntry()">
+        <template #icon>
+          <Icon name="mdi-post" />
+        </template>
+        Log Maintenance
+      </BaseButton>
+    </div>
+    <section class="page-layout my-6">
+      <div class="main-slot container space-y-6">
+        <BaseCard v-for="e in log.entries" :key="e.id">
+          <BaseSectionHeader class="p-6 border-b border-b-gray-300">
+            <span class="text-base-content">
+              {{ e.name }}
+            </span>
+            <template #description>
+              <div class="flex gap-2">
+                <div class="badge p-3">
+                  <Icon name="mdi-calendar" class="mr-2" />
+                  <DateTime :date="e.date" format="human" />
+                </div>
+                <div class="tooltip tooltip-primary" data-tip="Cost">
+                  <div class="badge badge-primary p-3">
+                    <Currency :amount="e.cost" />
+                  </div>
+                </div>
+              </div>
+            </template>
+          </BaseSectionHeader>
+          <div class="p-6">
+            <Markdown :source="e.description" />
+          </div>
+          <div class="flex justify-end p-4">
+            <BaseButton size="sm" @click="deleteEntry(e.id)">
+              <template #icon>
+                <Icon name="mdi-delete" />
+              </template>
+              Delete
+            </BaseButton>
+          </div>
+        </BaseCard>
+      </div>
+      <div class="side-slot space-y-6">
+        <div v-for="stat in stats" :key="stat.id" class="stats block shadow-xl border-l-primary">
+          <div class="stat">
+            <div class="stat-title">{{ stat.title }}</div>
+            <div class="stat-value text-primary">{{ stat.value }}</div>
+            <div class="stat-desc">{{ stat.subtitle }}</div>
+          </div>
+        </div>
+      </div>
+    </section>
+  </div>
+</template>
+
+<style scoped>
+  .page-layout {
+    display: grid;
+    grid-template-columns: auto minmax(0, 1fr);
+    grid-template-rows: auto;
+    grid-template-areas: "side main";
+    gap: 1rem;
+  }
+
+  .side-slot {
+    grid-area: side;
+  }
+
+  .main-slot {
+    grid-area: main;
+  }
+
+  .grid {
+    display: grid;
+  }
+</style>
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
index fc11122..8013d1b 100644
--- a/frontend/pnpm-lock.yaml
+++ b/frontend/pnpm-lock.yaml
@@ -4,7 +4,7 @@ specifiers:
   '@faker-js/faker': ^7.5.0
   '@iconify/vue': ^3.2.1
   '@nuxtjs/eslint-config-typescript': ^11.0.0
-  '@nuxtjs/tailwindcss': ^5.3.2
+  '@nuxtjs/tailwindcss': ^6.1.3
   '@pinia/nuxt': ^0.4.1
   '@tailwindcss/aspect-ratio': ^0.4.0
   '@tailwindcss/forms': ^0.5.2
@@ -21,7 +21,7 @@ specifiers:
   eslint-plugin-vue: ^9.4.0
   isomorphic-fetch: ^3.0.0
   markdown-it: ^13.0.1
-  nuxt: 3.0.0-rc.11
+  nuxt: 3.0.0
   pinia: ^2.0.21
   postcss: ^8.4.16
   prettier: ^2.7.1
@@ -33,34 +33,34 @@ specifiers:
 
 dependencies:
   '@iconify/vue': 3.2.1_vue@3.2.45
-  '@nuxtjs/tailwindcss': 5.3.5
-  '@pinia/nuxt': 0.4.6_mgnvym7yiazkylwwogi5r767ue
+  '@nuxtjs/tailwindcss': 6.1.3
+  '@pinia/nuxt': 0.4.6_prq2uz4lho2pwp6irk4cfkrxwu
   '@tailwindcss/aspect-ratio': 0.4.2_tailwindcss@3.2.4
   '@tailwindcss/forms': 0.5.3_tailwindcss@3.2.4
   '@tailwindcss/typography': 0.5.8_tailwindcss@3.2.4
-  '@vueuse/nuxt': 9.6.0_34m4vklv7wytvv7hkkggjs6mui
+  '@vueuse/nuxt': 9.6.0_nuxt@3.0.0+vue@3.2.45
   autoprefixer: 10.4.13_postcss@8.4.19
-  daisyui: 2.42.1_2lwn2upnx27dqeg6hqdu7sq75m
+  daisyui: 2.43.0_2lwn2upnx27dqeg6hqdu7sq75m
   dompurify: 2.4.1
   markdown-it: 13.0.1
-  pinia: 2.0.27_mgnvym7yiazkylwwogi5r767ue
+  pinia: 2.0.28_prq2uz4lho2pwp6irk4cfkrxwu
   postcss: 8.4.19
   tailwindcss: 3.2.4_postcss@8.4.19
   vue: 3.2.45
 
 devDependencies:
   '@faker-js/faker': 7.6.0
-  '@nuxtjs/eslint-config-typescript': 11.0.0_s5ps7njkmjlaqajutnox5ntcla
-  '@typescript-eslint/eslint-plugin': 5.45.0_yjegg5cyoezm3fzsmuszzhetym
-  '@typescript-eslint/parser': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
+  '@nuxtjs/eslint-config-typescript': 11.0.0_ha6vam6werchizxrnqvarmz2zu
+  '@typescript-eslint/eslint-plugin': 5.46.0_5mle7isnkfgjmrghnnczirv6iy
+  '@typescript-eslint/parser': 5.46.0_ha6vam6werchizxrnqvarmz2zu
   eslint: 8.29.0
   eslint-config-prettier: 8.5.0_eslint@8.29.0
-  eslint-plugin-prettier: 4.2.1_nrhoyyjffvfyk4vtlt5destxgm
+  eslint-plugin-prettier: 4.2.1_5dgjrgoi64tgrv3zzn3walur3u
   eslint-plugin-vue: 9.8.0_eslint@8.29.0
   isomorphic-fetch: 3.0.0
-  nuxt: 3.0.0-rc.11_s5ps7njkmjlaqajutnox5ntcla
-  prettier: 2.8.0
-  typescript: 4.9.3
+  nuxt: 3.0.0_ha6vam6werchizxrnqvarmz2zu
+  prettier: 2.8.1
+  typescript: 4.9.4
   vite-plugin-eslint: 1.8.1_eslint@8.29.0
   vitest: 0.22.1
 
@@ -347,16 +347,16 @@ packages:
       postcss-selector-parser: 6.0.11
     dev: false
 
-  /@esbuild/android-arm/0.15.16:
-    resolution: {integrity: sha512-nyB6CH++2mSgx3GbnrJsZSxzne5K0HMyNIWafDHqYy7IwxFc4fd/CgHVZXr8Eh+Q3KbIAcAe3vGyqIPhGblvMQ==}
+  /@esbuild/android-arm/0.15.18:
+    resolution: {integrity: sha512-5GT+kcs2WVGjVs7+boataCkO5Fg0y4kCjzkB5bAip7H4jfnOS3dA6KPiww9W1OEKTKeAcUVhdZGvgI65OXmUnw==}
     engines: {node: '>=12'}
     cpu: [arm]
     os: [android]
     requiresBuild: true
     optional: true
 
-  /@esbuild/linux-loong64/0.15.16:
-    resolution: {integrity: sha512-SDLfP1uoB0HZ14CdVYgagllgrG7Mdxhkt4jDJOKl/MldKrkQ6vDJMZKl2+5XsEY/Lzz37fjgLQoJBGuAw/x8kQ==}
+  /@esbuild/linux-loong64/0.15.18:
+    resolution: {integrity: sha512-L4jVKS82XVhw2nvzLg/19ClLWg0y27ulRwuP7lcyL6AbUWB5aPglXY3M21mauDQMDfRLs8cQmeT03r/+X3cZYQ==}
     engines: {node: '>=12'}
     cpu: [loong64]
     os: [linux]
@@ -475,7 +475,7 @@ packages:
       npmlog: 5.0.1
       rimraf: 3.0.2
       semver: 7.3.8
-      tar: 6.1.12
+      tar: 6.1.13
     transitivePeerDependencies:
       - encoding
       - supports-color
@@ -512,7 +512,7 @@ packages:
     engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     dependencies:
       '@nuxt/schema': 3.0.0
-      c12: 1.0.1
+      c12: 1.1.0
       consola: 2.15.3
       defu: 6.1.1
       globby: 13.1.2
@@ -533,29 +533,30 @@ packages:
       - rollup
       - supports-color
 
-  /@nuxt/kit/3.0.0-rc.11:
-    resolution: {integrity: sha512-o0E/k635Lzcxp4K5t0ToHC6WwQ1wyN0EIqMAQEzgiUexoAhzdURr21QI0D6e6U461u4KP7x92wYM87VxhMFXmQ==}
-    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0}
+  /@nuxt/kit/3.0.0_rollup@2.79.1:
+    resolution: {integrity: sha512-7ZsOLt5s9a0ZleAIzmoD70JwkZf5ti6bDdxl6f8ew7Huxz+ni/oRfTPTX9TrORXsgW5CvDt6Q9M7IJNPkAN/Iw==}
+    engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     dependencies:
-      '@nuxt/schema': 3.0.0-rc.11
-      c12: 0.2.13
+      '@nuxt/schema': 3.0.0_rollup@2.79.1
+      c12: 1.1.0
       consola: 2.15.3
       defu: 6.1.1
       globby: 13.1.2
       hash-sum: 2.0.0
       ignore: 5.2.1
       jiti: 1.16.0
-      knitwork: 0.1.3
+      knitwork: 1.0.0
       lodash.template: 4.5.0
-      mlly: 0.5.17
-      pathe: 0.3.9
-      pkg-types: 0.3.6
-      scule: 0.3.2
+      mlly: 1.0.0
+      pathe: 1.0.0
+      pkg-types: 1.0.1
+      scule: 1.0.0
       semver: 7.3.8
       unctx: 2.1.1
-      unimport: 0.6.8
-      untyped: 0.5.0
+      unimport: 1.0.1_rollup@2.79.1
+      untyped: 1.0.0
     transitivePeerDependencies:
+      - rollup
       - supports-color
 
   /@nuxt/postcss8/1.1.3:
@@ -577,7 +578,7 @@ packages:
     resolution: {integrity: sha512-5fwsidhs5NjFzR8sIzHMXO0WFGkI3tCH3ViANn2W4N5qCwoYZ0n1sZBkQ9Esn1VoEed6RsIlTpWrPZPVtqNkGQ==}
     engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     dependencies:
-      c12: 1.0.1
+      c12: 1.1.0
       create-require: 1.1.1
       defu: 6.1.1
       jiti: 1.16.0
@@ -593,33 +594,37 @@ packages:
       - rollup
       - supports-color
 
-  /@nuxt/schema/3.0.0-rc.11:
-    resolution: {integrity: sha512-EIBYQeBxJ+JZ8RjPRGaXM9+vtWMHQ4HsqZIw5a+p6hqRLGf53fHANT4vjMQZA4fAYBnJZJI7dB/OXkfyb/kikA==}
-    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0}
+  /@nuxt/schema/3.0.0_rollup@2.79.1:
+    resolution: {integrity: sha512-5fwsidhs5NjFzR8sIzHMXO0WFGkI3tCH3ViANn2W4N5qCwoYZ0n1sZBkQ9Esn1VoEed6RsIlTpWrPZPVtqNkGQ==}
+    engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     dependencies:
-      c12: 0.2.13
+      c12: 1.1.0
       create-require: 1.1.1
       defu: 6.1.1
       jiti: 1.16.0
-      pathe: 0.3.9
-      pkg-types: 0.3.6
+      pathe: 1.0.0
+      pkg-types: 1.0.1
       postcss-import-resolver: 2.0.0
-      scule: 0.3.2
+      scule: 1.0.0
       std-env: 3.3.1
-      ufo: 0.8.6
-      unimport: 0.6.8
+      ufo: 1.0.1
+      unimport: 1.0.1_rollup@2.79.1
+      untyped: 1.0.0
+    transitivePeerDependencies:
+      - rollup
+      - supports-color
 
   /@nuxt/telemetry/2.1.8:
     resolution: {integrity: sha512-WCHRrcPKRosuHQi8CD5WfjiXGAyjOWVJpK77xS6wlg8zwziBPCqmVIQdr4QpFTGFO1Nrh4z26l1VnivKy22KFQ==}
     hasBin: true
     dependencies:
       '@nuxt/kit': 3.0.0
-      chalk: 5.1.2
+      chalk: 5.2.0
       ci-info: 3.7.0
       consola: 2.15.3
       create-require: 1.1.1
       defu: 6.1.1
-      destr: 1.2.1
+      destr: 1.2.2
       dotenv: 16.0.3
       fs-extra: 10.1.0
       git-url-parse: 13.1.0
@@ -637,86 +642,88 @@ packages:
       - rollup
       - supports-color
 
-  /@nuxt/ui-templates/0.4.0:
-    resolution: {integrity: sha512-oFjUfn9r9U4vNljd5uU08+6M3mF6OSxZfCrfqJQaN5TtqVTcZmZFzOZ4H866Lq+Eaugv/Vte225kuaZCB3FR/g==}
+  /@nuxt/ui-templates/1.0.0:
+    resolution: {integrity: sha512-jfpVHxi1AHfNO3D6iD1RJE6fx/7cAzekvG90poIzVawp/L+I4DNdy8pCgqBScJW4bfWOpHeLYbtQQlL/hPmkjw==}
 
-  /@nuxt/vite-builder/3.0.0-rc.11_5akckbu4tmbn6phmzmqezegkrq:
-    resolution: {integrity: sha512-WkQ+/cfdIf5XVZea8xD+ciLXpmQkNu8d5p16WJSp10hEhj3Vt/cQ8OkXDVHGGRML+NsDL0bQXDeg3PcM/bw94w==}
-    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0}
+  /@nuxt/vite-builder/3.0.0_rsibta3vmmagu2awyzif4pq2lq:
+    resolution: {integrity: sha512-eMnpPpjHU8rGZcsJUksCuSX+6dpId03q8LOSStsm6rXzrNJtZIcwt0nBRTUaigckXIozX8ZNl5u2OPGUfUbMrw==}
+    engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     peerDependencies:
-      vue: ^3.2.39
+      vue: ^3.2.45
     dependencies:
-      '@nuxt/kit': 3.0.0-rc.11
-      '@rollup/plugin-replace': 4.0.0_rollup@2.79.1
-      '@vitejs/plugin-vue': 3.2.0_vite@3.1.8+vue@3.2.45
-      '@vitejs/plugin-vue-jsx': 2.1.1_vite@3.1.8+vue@3.2.45
+      '@nuxt/kit': 3.0.0_rollup@2.79.1
+      '@rollup/plugin-replace': 5.0.1_rollup@2.79.1
+      '@vitejs/plugin-vue': 3.2.0_vite@3.2.5+vue@3.2.45
+      '@vitejs/plugin-vue-jsx': 2.1.1_vite@3.2.5+vue@3.2.45
       autoprefixer: 10.4.13_postcss@8.4.19
       chokidar: 3.5.3
       cssnano: 5.1.14_postcss@8.4.19
       defu: 6.1.1
-      esbuild: 0.15.16
+      esbuild: 0.15.18
       escape-string-regexp: 5.0.0
       estree-walker: 3.0.1
-      externality: 0.2.2
+      externality: 1.0.0
       fs-extra: 10.1.0
       get-port-please: 2.6.1
-      h3: 0.7.21
-      knitwork: 0.1.3
+      h3: 1.0.1
+      knitwork: 1.0.0
       magic-string: 0.26.7
-      mlly: 0.5.17
-      ohash: 0.1.5
-      pathe: 0.3.9
+      mlly: 1.0.0
+      ohash: 1.0.0
+      pathe: 1.0.0
       perfect-debounce: 0.1.3
-      pkg-types: 0.3.6
+      pkg-types: 1.0.1
       postcss: 8.4.19
-      postcss-import: 15.0.1_postcss@8.4.19
+      postcss-import: 15.1.0_postcss@8.4.19
       postcss-url: 10.1.3_postcss@8.4.19
       rollup: 2.79.1
       rollup-plugin-visualizer: 5.8.3_rollup@2.79.1
-      ufo: 0.8.6
-      unplugin: 0.9.6
-      vite: 3.1.8
-      vite-node: 0.23.4
-      vite-plugin-checker: 0.5.1_aa2q4hyfpz4qcuczedr7bawau4
+      ufo: 1.0.1
+      unplugin: 1.0.0
+      vite: 3.2.5
+      vite-node: 0.25.6
+      vite-plugin-checker: 0.5.1_2diyh6cvkl6stcaq3dzcmc6p6a
       vue: 3.2.45
-      vue-bundle-renderer: 0.4.4
+      vue-bundle-renderer: 1.0.0
     transitivePeerDependencies:
+      - '@types/node'
       - eslint
       - less
       - sass
       - stylus
+      - sugarss
       - supports-color
       - terser
       - typescript
       - vls
       - vti
 
-  /@nuxtjs/eslint-config-typescript/11.0.0_s5ps7njkmjlaqajutnox5ntcla:
+  /@nuxtjs/eslint-config-typescript/11.0.0_ha6vam6werchizxrnqvarmz2zu:
     resolution: {integrity: sha512-hmFjGtXT524ql8eTbK8BaRkamcXB6Z8YOW8nSQhosTP6oBw9WtOFUeWr7holyE278UhOmx+wDFG90BnyM9D+UA==}
     peerDependencies:
       eslint: ^8.23.0
     dependencies:
-      '@nuxtjs/eslint-config': 11.0.0_n542pvy4d6vz5nffbpq5koul4e
-      '@typescript-eslint/eslint-plugin': 5.45.0_yjegg5cyoezm3fzsmuszzhetym
-      '@typescript-eslint/parser': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
+      '@nuxtjs/eslint-config': 11.0.0_hmezkefo75s2prddlqllgjxqc4
+      '@typescript-eslint/eslint-plugin': 5.46.0_5mle7isnkfgjmrghnnczirv6iy
+      '@typescript-eslint/parser': 5.46.0_ha6vam6werchizxrnqvarmz2zu
       eslint: 8.29.0
       eslint-import-resolver-typescript: 3.5.2_lt3hqehuojhfcbzgzqfngbtmrq
-      eslint-plugin-import: 2.26.0_n542pvy4d6vz5nffbpq5koul4e
+      eslint-plugin-import: 2.26.0_hmezkefo75s2prddlqllgjxqc4
     transitivePeerDependencies:
       - eslint-import-resolver-webpack
       - supports-color
       - typescript
     dev: true
 
-  /@nuxtjs/eslint-config/11.0.0_n542pvy4d6vz5nffbpq5koul4e:
+  /@nuxtjs/eslint-config/11.0.0_hmezkefo75s2prddlqllgjxqc4:
     resolution: {integrity: sha512-o4zFOpU8gJgwrC/gLE7c2E0XEjkv2fEixCGG1y+dZYzBPyzTorkQmfxskSF3WRXcZkpkS9uUYlRkeOSdYB7z0w==}
     peerDependencies:
       eslint: ^8.23.0
     dependencies:
       eslint: 8.29.0
-      eslint-config-standard: 17.0.0_jafpsg2texzosb7zvycotik6am
-      eslint-plugin-import: 2.26.0_n542pvy4d6vz5nffbpq5koul4e
-      eslint-plugin-n: 15.5.1_eslint@8.29.0
+      eslint-config-standard: 17.0.0_wnkmxhw54rcoqx42l6oqxte7qq
+      eslint-plugin-import: 2.26.0_hmezkefo75s2prddlqllgjxqc4
+      eslint-plugin-n: 15.6.0_eslint@8.29.0
       eslint-plugin-node: 11.1.0_eslint@8.29.0
       eslint-plugin-promise: 6.1.1_eslint@8.29.0
       eslint-plugin-unicorn: 43.0.2_eslint@8.29.0
@@ -728,16 +735,17 @@ packages:
       - supports-color
     dev: true
 
-  /@nuxtjs/tailwindcss/5.3.5:
-    resolution: {integrity: sha512-d6noacVfcN88R6Iqd5/kl7YyumE+EIsz6ky26JvidHtkTeAPxZt8XN/KFEMJ6xwSvhsUndrNW94XYPKv7l79jg==}
+  /@nuxtjs/tailwindcss/6.1.3:
+    resolution: {integrity: sha512-XgoltsFhpX5SCxgUA9cEyLuKyH9xkjlfT+npDQNhSW71/BpeNPmmtjyD+o5ShAvyiZD2AzvZ0/P/eMNDfT33fA==}
     dependencies:
       '@nuxt/kit': 3.0.0
       '@nuxt/postcss8': 1.1.3
       autoprefixer: 10.4.13_postcss@8.4.19
-      chalk: 5.1.2
+      chalk: 5.2.0
       clear-module: 4.1.2
       consola: 2.15.3
       defu: 6.1.1
+      h3: 0.8.6
       postcss: 8.4.19
       postcss-custom-properties: 12.1.11_postcss@8.4.19
       postcss-nesting: 10.2.0_postcss@8.4.19
@@ -751,11 +759,11 @@ packages:
       - webpack
     dev: false
 
-  /@pinia/nuxt/0.4.6_mgnvym7yiazkylwwogi5r767ue:
+  /@pinia/nuxt/0.4.6_prq2uz4lho2pwp6irk4cfkrxwu:
     resolution: {integrity: sha512-HjrYEfLdFpmsjhicPJgL36jVhzHWukIQPFFHGTSF84Cplu+f2nY2XHKqe9ToHzE9rLee2RjLOwAzOnXa/I/u6A==}
     dependencies:
       '@nuxt/kit': 3.0.0
-      pinia: 2.0.27_mgnvym7yiazkylwwogi5r767ue
+      pinia: 2.0.28_prq2uz4lho2pwp6irk4cfkrxwu
     transitivePeerDependencies:
       - '@vue/composition-api'
       - rollup
@@ -776,90 +784,102 @@ packages:
       tslib: 2.4.1
     dev: true
 
-  /@rollup/plugin-alias/3.1.9_rollup@2.79.1:
-    resolution: {integrity: sha512-QI5fsEvm9bDzt32k39wpOwZhVzRcL5ydcffUHMyLVaVaLeC70I8TJZ17F1z1eMoLu4E/UOcH9BWVkKpIKdrfiw==}
-    engines: {node: '>=8.0.0'}
+  /@rollup/plugin-alias/4.0.2_rollup@2.79.1:
+    resolution: {integrity: sha512-1hv7dBOZZwo3SEupxn4UA2N0EDThqSSS+wI1St1TNTBtOZvUchyIClyHcnDcjjrReTPZ47Faedrhblv4n+T5UQ==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^1.20.0||^2.0.0
+      rollup: ^1.20.0||^2.0.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
       rollup: 2.79.1
-      slash: 3.0.0
+      slash: 4.0.0
 
-  /@rollup/plugin-commonjs/22.0.2_rollup@2.79.1:
-    resolution: {integrity: sha512-//NdP6iIwPbMTcazYsiBMbJW7gfmpHom33u1beiIoHDEM0Q9clvtQB1T0efvMqHeKsGohiHo97BCPCkBXdscwg==}
-    engines: {node: '>= 12.0.0'}
+  /@rollup/plugin-commonjs/23.0.4_rollup@2.79.1:
+    resolution: {integrity: sha512-bOPJeTZg56D2MCm+TT4psP8e8Jmf1Jsi7pFUMl8BN5kOADNzofNHe47+84WVCt7D095xPghC235/YKuNDEhczg==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^2.68.0
+      rollup: ^2.68.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
-      '@rollup/pluginutils': 3.1.0_rollup@2.79.1
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
       commondir: 1.0.1
       estree-walker: 2.0.2
-      glob: 7.2.3
+      glob: 8.0.3
       is-reference: 1.2.1
-      magic-string: 0.25.9
-      resolve: 1.22.1
+      magic-string: 0.26.7
       rollup: 2.79.1
 
-  /@rollup/plugin-inject/4.0.4_rollup@2.79.1:
-    resolution: {integrity: sha512-4pbcU4J/nS+zuHk+c+OL3WtmEQhqxlZ9uqfjQMQDOHOPld7PsCd8k5LWs8h5wjwJN7MgnAn768F2sDxEP4eNFQ==}
+  /@rollup/plugin-inject/5.0.2_rollup@2.79.1:
+    resolution: {integrity: sha512-zRthPC/sZ2OaQwPh2LvFn0A+3SyMAZR1Vqsp89mWkIuGXKswT8ty1JWj1pf7xdZvft4gHZaCuhdopuiCwjclWg==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^1.20.0 || ^2.0.0
+      rollup: ^1.20.0||^2.0.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
-      '@rollup/pluginutils': 3.1.0_rollup@2.79.1
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
       estree-walker: 2.0.2
-      magic-string: 0.25.9
+      magic-string: 0.26.7
       rollup: 2.79.1
 
-  /@rollup/plugin-json/4.1.0_rollup@2.79.1:
-    resolution: {integrity: sha512-yfLbTdNS6amI/2OpmbiBoW12vngr5NW2jCJVZSBEz+H5KfUJZ2M7sDjk0U6GOOdCWFVScShte29o9NezJ53TPw==}
+  /@rollup/plugin-json/5.0.2_rollup@2.79.1:
+    resolution: {integrity: sha512-D1CoOT2wPvadWLhVcmpkDnesTzjhNIQRWLsc3fA49IFOP2Y84cFOOJ+nKGYedvXHKUsPeq07HR4hXpBBr+CHlA==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^1.20.0 || ^2.0.0
+      rollup: ^1.20.0||^2.0.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
-      '@rollup/pluginutils': 3.1.0_rollup@2.79.1
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
       rollup: 2.79.1
 
-  /@rollup/plugin-node-resolve/14.1.0_rollup@2.79.1:
-    resolution: {integrity: sha512-5G2niJroNCz/1zqwXtk0t9+twOSDlG00k1Wfd7bkbbXmwg8H8dvgHdIWAun53Ps/rckfvOC7scDBjuGFg5OaWw==}
-    engines: {node: '>= 10.0.0'}
+  /@rollup/plugin-node-resolve/15.0.1_rollup@2.79.1:
+    resolution: {integrity: sha512-ReY88T7JhJjeRVbfCyNj+NXAG3IIsVMsX9b5/9jC98dRP8/yxlZdz7mHZbHk5zHr24wZZICS5AcXsFZAXYUQEg==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^2.78.0
+      rollup: ^2.78.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
-      '@rollup/pluginutils': 3.1.0_rollup@2.79.1
-      '@types/resolve': 1.17.1
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
+      '@types/resolve': 1.20.2
       deepmerge: 4.2.2
       is-builtin-module: 3.2.0
       is-module: 1.0.0
       resolve: 1.22.1
       rollup: 2.79.1
 
-  /@rollup/plugin-replace/4.0.0_rollup@2.79.1:
-    resolution: {integrity: sha512-+rumQFiaNac9y64OHtkHGmdjm7us9bo1PlbgQfdihQtuNxzjpaB064HbRnewUOggLQxVCCyINfStkgmBeQpv1g==}
+  /@rollup/plugin-replace/5.0.1_rollup@2.79.1:
+    resolution: {integrity: sha512-Z3MfsJ4CK17BfGrZgvrcp/l6WXoKb0kokULO+zt/7bmcyayokDaQ2K3eDJcRLCTAlp5FPI4/gz9MHAsosz4Rag==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^1.20.0 || ^2.0.0
+      rollup: ^1.20.0||^2.0.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
-      '@rollup/pluginutils': 3.1.0_rollup@2.79.1
-      magic-string: 0.25.9
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
+      magic-string: 0.26.7
       rollup: 2.79.1
 
-  /@rollup/plugin-wasm/5.2.0_rollup@2.79.1:
-    resolution: {integrity: sha512-PR3ff67ls2Kr9H04pZ24wJYPZq0YV+UHySpk7OuAJxyc7o5Q8NHFdwi4pfMtJkJkqfN1/QY/nq46SoRDoDvK2w==}
-    engines: {node: '>=10.0.0'}
+  /@rollup/plugin-wasm/6.0.1_rollup@2.79.1:
+    resolution: {integrity: sha512-a5yRknFQG/QGhb1xGkazWXgjpsv0hhWlx34irsf5adMEo55NdpzhZLg+jx49u+bzH6ekktuFg2WKA1RAF+WEDQ==}
+    engines: {node: '>=14.0.0'}
     peerDependencies:
-      rollup: ^1.20.0 || ^2.0.0
+      rollup: ^1.20.0||^2.0.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
     dependencies:
       rollup: 2.79.1
 
-  /@rollup/pluginutils/3.1.0_rollup@2.79.1:
-    resolution: {integrity: sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==}
-    engines: {node: '>= 8.0.0'}
-    peerDependencies:
-      rollup: ^1.20.0||^2.0.0
-    dependencies:
-      '@types/estree': 0.0.39
-      estree-walker: 1.0.1
-      picomatch: 2.3.1
-      rollup: 2.79.1
-
   /@rollup/pluginutils/4.2.1:
     resolution: {integrity: sha512-iKnFXr7NkdZAIHiIWE+BX5ULi/ucVFYWD6TbAV+rZctiRTY2PL6tsIKhoIOaoskiWAkgu+VsbXgUVDNLHf+InQ==}
     engines: {node: '>= 8.0.0'}
@@ -880,6 +900,20 @@ packages:
       estree-walker: 2.0.2
       picomatch: 2.3.1
 
+  /@rollup/pluginutils/5.0.2_rollup@2.79.1:
+    resolution: {integrity: sha512-pTd9rIsP92h+B6wWwFbW8RkZv4hiR/xKsqre4SIuAOaOEQRxi0lqLke9k2/7WegC85GgUs9pjmOjCUi3In4vwA==}
+    engines: {node: '>=14.0.0'}
+    peerDependencies:
+      rollup: ^1.20.0||^2.0.0||^3.0.0
+    peerDependenciesMeta:
+      rollup:
+        optional: true
+    dependencies:
+      '@types/estree': 1.0.0
+      estree-walker: 2.0.2
+      picomatch: 2.3.1
+      rollup: 2.79.1
+
   /@tailwindcss/aspect-ratio/0.4.2_tailwindcss@3.2.4:
     resolution: {integrity: sha512-8QPrypskfBa7QIMuKHg2TA7BqES6vhBrDLOv8Unb6FcFyd3TjKbc6lcmb9UPQHxfl24sXoJ41ux/H7qQQvfaSQ==}
     peerDependencies:
@@ -930,9 +964,6 @@ packages:
       '@types/json-schema': 7.0.11
     dev: true
 
-  /@types/estree/0.0.39:
-    resolution: {integrity: sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==}
-
   /@types/estree/1.0.0:
     resolution: {integrity: sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==}
 
@@ -943,8 +974,8 @@ packages:
     resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==}
     dev: true
 
-  /@types/node/18.11.10:
-    resolution: {integrity: sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ==}
+  /@types/node/18.11.12:
+    resolution: {integrity: sha512-FgD3NtTAKvyMmD44T07zz2fEf+OKwutgBCEVM8GcvMGVGaDktiLNTDvPwC/LUe3PinMW+X6CuLOF2Ui1mAlSXg==}
 
   /@types/normalize-package-data/2.4.1:
     resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==}
@@ -954,10 +985,8 @@ packages:
     resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==}
     dev: false
 
-  /@types/resolve/1.17.1:
-    resolution: {integrity: sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw==}
-    dependencies:
-      '@types/node': 18.11.10
+  /@types/resolve/1.20.2:
+    resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==}
 
   /@types/semver/7.3.13:
     resolution: {integrity: sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==}
@@ -967,8 +996,8 @@ packages:
     resolution: {integrity: sha512-oh8q2Zc32S6gd/j50GowEjKLoOVOwHP/bWVjKJInBwQqdOYMdPrf1oVlelTlyfFK3CKxL1uahMDAr+vy8T7yMQ==}
     dev: false
 
-  /@typescript-eslint/eslint-plugin/5.45.0_yjegg5cyoezm3fzsmuszzhetym:
-    resolution: {integrity: sha512-CXXHNlf0oL+Yg021cxgOdMHNTXD17rHkq7iW6RFHoybdFgQBjU3yIXhhcPpGwr1CjZlo6ET8C6tzX5juQoXeGA==}
+  /@typescript-eslint/eslint-plugin/5.46.0_5mle7isnkfgjmrghnnczirv6iy:
+    resolution: {integrity: sha512-QrZqaIOzJAjv0sfjY4EjbXUi3ZOFpKfzntx22gPGr9pmFcTjcFw/1sS1LJhEubfAGwuLjNrPV0rH+D1/XZFy7Q==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     peerDependencies:
       '@typescript-eslint/parser': ^5.0.0
@@ -978,24 +1007,24 @@ packages:
       typescript:
         optional: true
     dependencies:
-      '@typescript-eslint/parser': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
-      '@typescript-eslint/scope-manager': 5.45.0
-      '@typescript-eslint/type-utils': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
-      '@typescript-eslint/utils': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
+      '@typescript-eslint/parser': 5.46.0_ha6vam6werchizxrnqvarmz2zu
+      '@typescript-eslint/scope-manager': 5.46.0
+      '@typescript-eslint/type-utils': 5.46.0_ha6vam6werchizxrnqvarmz2zu
+      '@typescript-eslint/utils': 5.46.0_ha6vam6werchizxrnqvarmz2zu
       debug: 4.3.4
       eslint: 8.29.0
       ignore: 5.2.1
       natural-compare-lite: 1.4.0
       regexpp: 3.2.0
       semver: 7.3.8
-      tsutils: 3.21.0_typescript@4.9.3
-      typescript: 4.9.3
+      tsutils: 3.21.0_typescript@4.9.4
+      typescript: 4.9.4
     transitivePeerDependencies:
       - supports-color
     dev: true
 
-  /@typescript-eslint/parser/5.45.0_s5ps7njkmjlaqajutnox5ntcla:
-    resolution: {integrity: sha512-brvs/WSM4fKUmF5Ot/gEve6qYiCMjm6w4HkHPfS6ZNmxTS0m0iNN4yOChImaCkqc1hRwFGqUyanMXuGal6oyyQ==}
+  /@typescript-eslint/parser/5.46.0_ha6vam6werchizxrnqvarmz2zu:
+    resolution: {integrity: sha512-joNO6zMGUZg+C73vwrKXCd8usnsmOYmgW/w5ZW0pG0RGvqeznjtGDk61EqqTpNrFLUYBW2RSBFrxdAZMqA4OZA==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     peerDependencies:
       eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
@@ -1004,26 +1033,26 @@ packages:
       typescript:
         optional: true
     dependencies:
-      '@typescript-eslint/scope-manager': 5.45.0
-      '@typescript-eslint/types': 5.45.0
-      '@typescript-eslint/typescript-estree': 5.45.0_typescript@4.9.3
+      '@typescript-eslint/scope-manager': 5.46.0
+      '@typescript-eslint/types': 5.46.0
+      '@typescript-eslint/typescript-estree': 5.46.0_typescript@4.9.4
       debug: 4.3.4
       eslint: 8.29.0
-      typescript: 4.9.3
+      typescript: 4.9.4
     transitivePeerDependencies:
       - supports-color
     dev: true
 
-  /@typescript-eslint/scope-manager/5.45.0:
-    resolution: {integrity: sha512-noDMjr87Arp/PuVrtvN3dXiJstQR1+XlQ4R1EvzG+NMgXi8CuMCXpb8JqNtFHKceVSQ985BZhfRdowJzbv4yKw==}
+  /@typescript-eslint/scope-manager/5.46.0:
+    resolution: {integrity: sha512-7wWBq9d/GbPiIM6SqPK9tfynNxVbfpihoY5cSFMer19OYUA3l4powA2uv0AV2eAZV6KoAh6lkzxv4PoxOLh1oA==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     dependencies:
-      '@typescript-eslint/types': 5.45.0
-      '@typescript-eslint/visitor-keys': 5.45.0
+      '@typescript-eslint/types': 5.46.0
+      '@typescript-eslint/visitor-keys': 5.46.0
     dev: true
 
-  /@typescript-eslint/type-utils/5.45.0_s5ps7njkmjlaqajutnox5ntcla:
-    resolution: {integrity: sha512-DY7BXVFSIGRGFZ574hTEyLPRiQIvI/9oGcN8t1A7f6zIs6ftbrU0nhyV26ZW//6f85avkwrLag424n+fkuoJ1Q==}
+  /@typescript-eslint/type-utils/5.46.0_ha6vam6werchizxrnqvarmz2zu:
+    resolution: {integrity: sha512-dwv4nimVIAsVS2dTA0MekkWaRnoYNXY26dKz8AN5W3cBFYwYGFQEqm/cG+TOoooKlncJS4RTbFKgcFY/pOiBCg==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     peerDependencies:
       eslint: '*'
@@ -1032,23 +1061,23 @@ packages:
       typescript:
         optional: true
     dependencies:
-      '@typescript-eslint/typescript-estree': 5.45.0_typescript@4.9.3
-      '@typescript-eslint/utils': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
+      '@typescript-eslint/typescript-estree': 5.46.0_typescript@4.9.4
+      '@typescript-eslint/utils': 5.46.0_ha6vam6werchizxrnqvarmz2zu
       debug: 4.3.4
       eslint: 8.29.0
-      tsutils: 3.21.0_typescript@4.9.3
-      typescript: 4.9.3
+      tsutils: 3.21.0_typescript@4.9.4
+      typescript: 4.9.4
     transitivePeerDependencies:
       - supports-color
     dev: true
 
-  /@typescript-eslint/types/5.45.0:
-    resolution: {integrity: sha512-QQij+u/vgskA66azc9dCmx+rev79PzX8uDHpsqSjEFtfF2gBUTRCpvYMh2gw2ghkJabNkPlSUCimsyBEQZd1DA==}
+  /@typescript-eslint/types/5.46.0:
+    resolution: {integrity: sha512-wHWgQHFB+qh6bu0IAPAJCdeCdI0wwzZnnWThlmHNY01XJ9Z97oKqKOzWYpR2I83QmshhQJl6LDM9TqMiMwJBTw==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     dev: true
 
-  /@typescript-eslint/typescript-estree/5.45.0_typescript@4.9.3:
-    resolution: {integrity: sha512-maRhLGSzqUpFcZgXxg1qc/+H0bT36lHK4APhp0AEUVrpSwXiRAomm/JGjSG+kNUio5kAa3uekCYu/47cnGn5EQ==}
+  /@typescript-eslint/typescript-estree/5.46.0_typescript@4.9.4:
+    resolution: {integrity: sha512-kDLNn/tQP+Yp8Ro2dUpyyVV0Ksn2rmpPpB0/3MO874RNmXtypMwSeazjEN/Q6CTp8D7ExXAAekPEcCEB/vtJkw==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     peerDependencies:
       typescript: '*'
@@ -1056,29 +1085,29 @@ packages:
       typescript:
         optional: true
     dependencies:
-      '@typescript-eslint/types': 5.45.0
-      '@typescript-eslint/visitor-keys': 5.45.0
+      '@typescript-eslint/types': 5.46.0
+      '@typescript-eslint/visitor-keys': 5.46.0
       debug: 4.3.4
       globby: 11.1.0
       is-glob: 4.0.3
       semver: 7.3.8
-      tsutils: 3.21.0_typescript@4.9.3
-      typescript: 4.9.3
+      tsutils: 3.21.0_typescript@4.9.4
+      typescript: 4.9.4
     transitivePeerDependencies:
       - supports-color
     dev: true
 
-  /@typescript-eslint/utils/5.45.0_s5ps7njkmjlaqajutnox5ntcla:
-    resolution: {integrity: sha512-OUg2JvsVI1oIee/SwiejTot2OxwU8a7UfTFMOdlhD2y+Hl6memUSL4s98bpUTo8EpVEr0lmwlU7JSu/p2QpSvA==}
+  /@typescript-eslint/utils/5.46.0_ha6vam6werchizxrnqvarmz2zu:
+    resolution: {integrity: sha512-4O+Ps1CRDw+D+R40JYh5GlKLQERXRKW5yIQoNDpmXPJ+C7kaPF9R7GWl+PxGgXjB3PQCqsaaZUpZ9dG4U6DO7g==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     peerDependencies:
       eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
     dependencies:
       '@types/json-schema': 7.0.11
       '@types/semver': 7.3.13
-      '@typescript-eslint/scope-manager': 5.45.0
-      '@typescript-eslint/types': 5.45.0
-      '@typescript-eslint/typescript-estree': 5.45.0_typescript@4.9.3
+      '@typescript-eslint/scope-manager': 5.46.0
+      '@typescript-eslint/types': 5.46.0
+      '@typescript-eslint/typescript-estree': 5.46.0_typescript@4.9.4
       eslint: 8.29.0
       eslint-scope: 5.1.1
       eslint-utils: 3.0.0_eslint@8.29.0
@@ -1088,19 +1117,46 @@ packages:
       - typescript
     dev: true
 
-  /@typescript-eslint/visitor-keys/5.45.0:
-    resolution: {integrity: sha512-jc6Eccbn2RtQPr1s7th6jJWQHBHI6GBVQkCHoJFQ5UreaKm59Vxw+ynQUPPY2u2Amquc+7tmEoC2G52ApsGNNg==}
+  /@typescript-eslint/visitor-keys/5.46.0:
+    resolution: {integrity: sha512-E13gBoIXmaNhwjipuvQg1ByqSAu/GbEpP/qzFihugJ+MomtoJtFAJG/+2DRPByf57B863m0/q7Zt16V9ohhANw==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     dependencies:
-      '@typescript-eslint/types': 5.45.0
+      '@typescript-eslint/types': 5.46.0
       eslint-visitor-keys: 3.3.0
     dev: true
 
-  /@vercel/nft/0.22.1:
-    resolution: {integrity: sha512-lYYZIoxRurqDOSoVIdBicGnpUIpfyaS5qVjdPq+EfI285WqtZK3NK/dyCkiyBul+X2U2OEhRyeMdXPCHGJbohw==}
+  /@unhead/dom/1.0.13:
+    resolution: {integrity: sha512-ErfhK3Nwk3kpxnPEOrkruKAdS3/TrNlKs0nYtKgFJ1ywJYg+uNwRFDe82v4JdUMhnfmbgL/qcO3PTx3Dv09IEQ==}
+    dependencies:
+      '@unhead/schema': 1.0.13
+
+  /@unhead/schema/1.0.13:
+    resolution: {integrity: sha512-K8SiAEkM8G7GaF1QvsKlthLmRqGB8R9SvZXMCucZqb2VQ6bU4IFSs/4q6dKxmV0fXb5AHdKUL9+rX/4rQ6FsZg==}
+    dependencies:
+      '@zhead/schema': 1.0.7
+      hookable: 5.4.2
+
+  /@unhead/ssr/1.0.13:
+    resolution: {integrity: sha512-pach3THVx8LU54M6aQ4qZeQdcLjXVnPlpHe7pQjHGvD6iBJC5bZc8TL+CHdTRxeiq2DqMA5uyfoor7VJJTi5AQ==}
+    dependencies:
+      '@unhead/schema': 1.0.13
+
+  /@unhead/vue/1.0.13_vue@3.2.45:
+    resolution: {integrity: sha512-sGl640UQqN8HUYTKXOh6gErk/vw8byPdx1+ECqX4ec7UZYktsWgfyIReYBu09Qm3O6pIYfX8HlZbDipX+wQAOQ==}
+    peerDependencies:
+      vue: '>=2.7 || >=3'
+    dependencies:
+      '@unhead/schema': 1.0.13
+      hookable: 5.4.2
+      vue: 3.2.45
+
+  /@vercel/nft/0.22.5:
+    resolution: {integrity: sha512-mug57Wd1BL7GMj9gXMgMeKUjdqO0e4u+0QLPYMFE1rwdJ+55oPy6lp3nIBCS8gOvigT62UI4QKUL2sGqcoW4Hw==}
+    engines: {node: '>=14'}
     hasBin: true
     dependencies:
       '@mapbox/node-pre-gyp': 1.0.10
+      '@rollup/pluginutils': 4.2.1
       acorn: 8.8.1
       async-sema: 3.1.1
       bindings: 1.5.0
@@ -1110,12 +1166,11 @@ packages:
       micromatch: 4.0.5
       node-gyp-build: 4.5.0
       resolve-from: 5.0.0
-      rollup-pluginutils: 2.8.2
     transitivePeerDependencies:
       - encoding
       - supports-color
 
-  /@vitejs/plugin-vue-jsx/2.1.1_vite@3.1.8+vue@3.2.45:
+  /@vitejs/plugin-vue-jsx/2.1.1_vite@3.2.5+vue@3.2.45:
     resolution: {integrity: sha512-JgDhxstQlwnHBvZ1BSnU5mbmyQ14/t5JhREc6YH5kWyu2QdAAOsLF6xgHoIWarj8tddaiwFrNzLbWJPudpXKYA==}
     engines: {node: ^14.18.0 || >=16.0.0}
     peerDependencies:
@@ -1125,19 +1180,19 @@ packages:
       '@babel/core': 7.20.5
       '@babel/plugin-transform-typescript': 7.20.2_@babel+core@7.20.5
       '@vue/babel-plugin-jsx': 1.1.1_@babel+core@7.20.5
-      vite: 3.1.8
+      vite: 3.2.5
       vue: 3.2.45
     transitivePeerDependencies:
       - supports-color
 
-  /@vitejs/plugin-vue/3.2.0_vite@3.1.8+vue@3.2.45:
+  /@vitejs/plugin-vue/3.2.0_vite@3.2.5+vue@3.2.45:
     resolution: {integrity: sha512-E0tnaL4fr+qkdCNxJ+Xd0yM31UwMkQje76fsDVBBUCoGOUPexu2VDUYHL8P4CwV+zMvWw6nlRw19OnRKmYAJpw==}
     engines: {node: ^14.18.0 || >=16.0.0}
     peerDependencies:
       vite: ^3.0.0
       vue: ^3.2.25
     dependencies:
-      vite: 3.1.8
+      vite: 3.2.5
       vue: 3.2.45
 
   /@vue/babel-helper-vue-transform-on/1.0.2:
@@ -1247,21 +1302,22 @@ packages:
       - vue
     dev: false
 
-  /@vueuse/head/0.7.13_vue@3.2.45:
-    resolution: {integrity: sha512-VHSla/0uOAUii/mz+NWLFKMrMbz/lQoYxEcpS/ri7drepYjdB5QdbTnD2KdOX8EmQM/b07qpjKanr7erGrd3XA==}
+  /@vueuse/head/1.0.22_vue@3.2.45:
+    resolution: {integrity: sha512-YmUdbzNdCnhmrAFxGnJS+Rixj+swE+TQC9OEaYDHIro6gE7W11jugcdwVP00HrA4WRQhg+TOQ4YcY2oL/PP1hw==}
     peerDependencies:
       vue: '>=2.7 || >=3'
     dependencies:
-      '@zhead/schema-vue': 0.7.4_vue@3.2.45
+      '@unhead/dom': 1.0.13
+      '@unhead/schema': 1.0.13
+      '@unhead/ssr': 1.0.13
+      '@unhead/vue': 1.0.13_vue@3.2.45
       vue: 3.2.45
-    transitivePeerDependencies:
-      - '@vue/composition-api'
 
   /@vueuse/metadata/9.6.0:
     resolution: {integrity: sha512-sIC8R+kWkIdpi5X2z2Gk8TRYzmczDwHRhEFfCu2P+XW2JdPoXrziqsGpDDsN7ykBx4ilwieS7JUIweVGhvZ93w==}
     dev: false
 
-  /@vueuse/nuxt/9.6.0_34m4vklv7wytvv7hkkggjs6mui:
+  /@vueuse/nuxt/9.6.0_nuxt@3.0.0+vue@3.2.45:
     resolution: {integrity: sha512-r+ngzHijvrfn89KnwlFCSe/xzFCiuskwmxbDdzFtpcZ57RHqLPdUS34YYrDxt7XasM0vlD3dmLIKEH//btf7sw==}
     peerDependencies:
       nuxt: ^3.0.0-rc.9
@@ -1270,7 +1326,7 @@ packages:
       '@vueuse/core': 9.6.0_vue@3.2.45
       '@vueuse/metadata': 9.6.0
       local-pkg: 0.4.2
-      nuxt: 3.0.0-rc.11_s5ps7njkmjlaqajutnox5ntcla
+      nuxt: 3.0.0_ha6vam6werchizxrnqvarmz2zu
       vue-demi: 0.13.11_vue@3.2.45
     transitivePeerDependencies:
       - '@vue/composition-api'
@@ -1286,20 +1342,10 @@ packages:
     transitivePeerDependencies:
       - '@vue/composition-api'
       - vue
+    dev: false
 
-  /@zhead/schema-vue/0.7.4_vue@3.2.45:
-    resolution: {integrity: sha512-Q7wPUly3ZWbPYQ5SEJBUuD6Mw3wiUfPMPquGfqsR2KF6sxQKRF8oaHnRLuu2uxpjuXjkzpBlZBPu1JgQX+Lf6Q==}
-    peerDependencies:
-      vue: '>=2.7 || >=3'
-    dependencies:
-      '@vueuse/shared': 9.6.0_vue@3.2.45
-      '@zhead/schema': 0.7.4
-      vue: 3.2.45
-    transitivePeerDependencies:
-      - '@vue/composition-api'
-
-  /@zhead/schema/0.7.4:
-    resolution: {integrity: sha512-18gfgmZL8gM64JdcT11d1rYLnCaY2mOtbDDSAXtCySanPGpkA1r+w01LCssEI/OfLdoZPS/kYCIW09d1PmUFfQ==}
+  /@zhead/schema/1.0.7:
+    resolution: {integrity: sha512-jN2ipkz39YrHd8uulgw/Y7x8iOxvR/cTkin/E9zRQVP5JBIrrJMiGyFFj6JBW4Q029xJ5dKtpwy/3RZWpz+dkQ==}
 
   /abbrev/1.1.1:
     resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==}
@@ -1463,7 +1509,7 @@ packages:
     dependencies:
       call-bind: 1.0.2
       define-properties: 1.1.4
-      es-abstract: 1.20.4
+      es-abstract: 1.20.5
       get-intrinsic: 1.1.3
       is-string: 1.0.7
     dev: true
@@ -1479,7 +1525,7 @@ packages:
     dependencies:
       call-bind: 1.0.2
       define-properties: 1.1.4
-      es-abstract: 1.20.4
+      es-abstract: 1.20.5
       es-shim-unscopables: 1.0.0
     dev: true
 
@@ -1512,7 +1558,7 @@ packages:
       postcss: ^8.1.0
     dependencies:
       browserslist: 4.21.4
-      caniuse-lite: 1.0.30001435
+      caniuse-lite: 1.0.30001439
       fraction.js: 4.2.0
       normalize-range: 0.1.2
       picocolors: 1.0.0
@@ -1577,7 +1623,7 @@ packages:
     engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
     hasBin: true
     dependencies:
-      caniuse-lite: 1.0.30001435
+      caniuse-lite: 1.0.30001439
       electron-to-chromium: 1.4.284
       node-releases: 2.0.6
       update-browserslist-db: 1.0.10_browserslist@4.21.4
@@ -1616,29 +1662,19 @@ packages:
     dependencies:
       streamsearch: 1.1.0
 
-  /c12/0.2.13:
-    resolution: {integrity: sha512-wJL0/knDbqM/3moLb+8Xd+w3JdkggkIIhiNBkxZ1mWlskKC/vajb85wM3UPg/D9nK6RbI1NgaVTg6AeXBVbknA==}
+  /c12/1.1.0:
+    resolution: {integrity: sha512-9KRFWEng+TH8sGST4NNdiKzZGw1Z1CHnPGAmNqAyVP7suluROmBjD8hsiR34f94DdlrvtGvvmiGDsoFXlCBWIw==}
     dependencies:
       defu: 6.1.1
       dotenv: 16.0.3
-      gittar: 0.1.1
-      jiti: 1.16.0
-      mlly: 0.5.17
-      pathe: 0.3.9
-      pkg-types: 0.3.6
-      rc9: 1.2.4
-
-  /c12/1.0.1:
-    resolution: {integrity: sha512-EN9Rqix2q9X3PseFkUvRFZ/0fvncF35ZR5nykLDwv4Ml/Q1WYPLkcdqlrczFll2G9t4qmxgM4my3EF3IrRGl5Q==}
-    dependencies:
-      defu: 6.1.1
-      dotenv: 16.0.3
-      gittar: 0.1.1
+      giget: 1.0.0
       jiti: 1.16.0
       mlly: 1.0.0
       pathe: 1.0.0
       pkg-types: 1.0.1
       rc9: 2.0.0
+    transitivePeerDependencies:
+      - supports-color
 
   /cache-content-type/1.0.1:
     resolution: {integrity: sha512-IKufZ1o4Ut42YUrZSo8+qnMTrFuKkvyoLXUywKz9GJ5BrhOFGhLdkx9sG4KAnVvbY6kEcSFjLQul+DVmBm2bgA==}
@@ -1672,12 +1708,12 @@ packages:
     resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==}
     dependencies:
       browserslist: 4.21.4
-      caniuse-lite: 1.0.30001435
+      caniuse-lite: 1.0.30001439
       lodash.memoize: 4.1.2
       lodash.uniq: 4.5.0
 
-  /caniuse-lite/1.0.30001435:
-    resolution: {integrity: sha512-kdCkUTjR+v4YAJelyiDTqiu82BDr4W4CP5sgTA0ZBmqn30XfS2ZghPLMowik9TPhS+psWJiUNxsqLyurDbmutA==}
+  /caniuse-lite/1.0.30001439:
+    resolution: {integrity: sha512-1MgUzEkoMO6gKfXflStpYgZDlFM7M/ck/bgfVCACO5vnAf0fXoNVHdWtqGU+MYca+4bL9Z5bpOVmR33cWW9G2A==}
 
   /chai/4.3.7:
     resolution: {integrity: sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==}
@@ -1685,7 +1721,7 @@ packages:
     dependencies:
       assertion-error: 1.1.0
       check-error: 1.0.2
-      deep-eql: 4.1.2
+      deep-eql: 4.1.3
       get-func-name: 2.0.0
       loupe: 2.3.6
       pathval: 1.1.1
@@ -1707,8 +1743,8 @@ packages:
       ansi-styles: 4.3.0
       supports-color: 7.2.0
 
-  /chalk/5.1.2:
-    resolution: {integrity: sha512-E5CkT4jWURs1Vy5qGJye+XwCkNj7Od3Af7CP6SujMetSMkLs8Do2RWJK5yx1wamHV/op8Rz+9rltjaTQWDnEFQ==}
+  /chalk/5.2.0:
+    resolution: {integrity: sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==}
     engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
 
   /chardet/0.7.0:
@@ -1732,9 +1768,6 @@ packages:
     optionalDependencies:
       fsevents: 2.3.2
 
-  /chownr/1.1.4:
-    resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==}
-
   /chownr/2.0.0:
     resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==}
     engines: {node: '>=10'}
@@ -2069,8 +2102,8 @@ packages:
   /cuint/0.2.2:
     resolution: {integrity: sha512-d4ZVpCW31eWwCMe1YT3ur7mUDnTXbgwyzaL320DrcRT45rfjYxkt5QWLrmOJ+/UEAI2+fQgKe/fCjR8l4TpRgw==}
 
-  /daisyui/2.42.1_2lwn2upnx27dqeg6hqdu7sq75m:
-    resolution: {integrity: sha512-IVeEvP8gvOzHR47fMrOp2YocQJMRmYskhdt7OsuhKJNn+YzLRGOpVpY7AGXt/56pYeYy7h03THHXRTW5cVU9rQ==}
+  /daisyui/2.43.0_2lwn2upnx27dqeg6hqdu7sq75m:
+    resolution: {integrity: sha512-0nwDQggoRB6F6Oezmgkx+9bJG7Xz8IYxgZK6/wdKN1RQZtYOTk+9DO1bsf9lyYyJ/NSBwgPKwkNnuuQFAPJCGg==}
     peerDependencies:
       autoprefixer: ^10.0.2
       postcss: ^8.1.6
@@ -2120,8 +2153,8 @@ packages:
     dependencies:
       ms: 2.1.2
 
-  /deep-eql/4.1.2:
-    resolution: {integrity: sha512-gT18+YW4CcW/DBNTwAmqTtkJh7f9qqScu2qFVlx7kCoeY9tlBu9cUcr7+I+Z/noG8INehS3xQgLpTtd/QUTn4w==}
+  /deep-eql/4.1.3:
+    resolution: {integrity: sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==}
     engines: {node: '>=6'}
     dependencies:
       type-detect: 4.0.8
@@ -2182,8 +2215,8 @@ packages:
     resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==}
     engines: {node: '>= 0.8'}
 
-  /destr/1.2.1:
-    resolution: {integrity: sha512-ud8w0qMLlci6iFG7CNgeRr8OcbUWMsbfjtWft1eJ5Luqrz/M8Ebqk/KCzne8rKUlIQWWfLv0wD6QHrqOf4GshA==}
+  /destr/1.2.2:
+    resolution: {integrity: sha512-lrbCJwD9saUQrqUfXvl6qoM+QN3W7tLV5pAOs+OqOmopCCz/JkE05MHedJR1xfk4IAnZuJXPVuN5+7jNA2ZCiA==}
 
   /destroy/1.2.0:
     resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==}
@@ -2333,8 +2366,8 @@ packages:
     dependencies:
       is-arrayish: 0.2.1
 
-  /es-abstract/1.20.4:
-    resolution: {integrity: sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA==}
+  /es-abstract/1.20.5:
+    resolution: {integrity: sha512-7h8MM2EQhsCA7pU/Nv78qOXFpD8Rhqd12gYiSJVkrH9+e8VuA8JlPJK/hQjjlLv6pJvx/z1iRFKzYb0XT/RuAQ==}
     engines: {node: '>= 0.4'}
     dependencies:
       call-bind: 1.0.2
@@ -2343,6 +2376,7 @@ packages:
       function.prototype.name: 1.1.5
       get-intrinsic: 1.1.3
       get-symbol-description: 1.0.0
+      gopd: 1.0.1
       has: 1.0.3
       has-property-descriptors: 1.0.0
       has-symbols: 1.0.3
@@ -2378,194 +2412,194 @@ packages:
       is-symbol: 1.0.4
     dev: true
 
-  /esbuild-android-64/0.15.16:
-    resolution: {integrity: sha512-Vwkv/sT0zMSgPSVO3Jlt1pUbnZuOgtOQJkJkyyJFAlLe7BiT8e9ESzo0zQSx4c3wW4T6kGChmKDPMbWTgtliQA==}
+  /esbuild-android-64/0.15.18:
+    resolution: {integrity: sha512-wnpt3OXRhcjfIDSZu9bnzT4/TNTDsOUvip0foZOUBG7QbSt//w3QV4FInVJxNhKc/ErhUxc5z4QjHtMi7/TbgA==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [android]
     requiresBuild: true
     optional: true
 
-  /esbuild-android-arm64/0.15.16:
-    resolution: {integrity: sha512-lqfKuofMExL5niNV3gnhMUYacSXfsvzTa/58sDlBET/hCOG99Zmeh+lz6kvdgvGOsImeo6J9SW21rFCogNPLxg==}
+  /esbuild-android-arm64/0.15.18:
+    resolution: {integrity: sha512-G4xu89B8FCzav9XU8EjsXacCKSG2FT7wW9J6hOc18soEHJdtWu03L3TQDGf0geNxfLTtxENKBzMSq9LlbjS8OQ==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [android]
     requiresBuild: true
     optional: true
 
-  /esbuild-darwin-64/0.15.16:
-    resolution: {integrity: sha512-wo2VWk/n/9V2TmqUZ/KpzRjCEcr00n7yahEdmtzlrfQ3lfMCf3Wa+0sqHAbjk3C6CKkR3WKK/whkMq5Gj4Da9g==}
+  /esbuild-darwin-64/0.15.18:
+    resolution: {integrity: sha512-2WAvs95uPnVJPuYKP0Eqx+Dl/jaYseZEUUT1sjg97TJa4oBtbAKnPnl3b5M9l51/nbx7+QAEtuummJZW0sBEmg==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [darwin]
     requiresBuild: true
     optional: true
 
-  /esbuild-darwin-arm64/0.15.16:
-    resolution: {integrity: sha512-fMXaUr5ou0M4WnewBKsspMtX++C1yIa3nJ5R2LSbLCfJT3uFdcRoU/NZjoM4kOMKyOD9Sa/2vlgN8G07K3SJnw==}
+  /esbuild-darwin-arm64/0.15.18:
+    resolution: {integrity: sha512-tKPSxcTJ5OmNb1btVikATJ8NftlyNlc8BVNtyT/UAr62JFOhwHlnoPrhYWz09akBLHI9nElFVfWSTSRsrZiDUA==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [darwin]
     requiresBuild: true
     optional: true
 
-  /esbuild-freebsd-64/0.15.16:
-    resolution: {integrity: sha512-UzIc0xlRx5x9kRuMr+E3+hlSOxa/aRqfuMfiYBXu2jJ8Mzej4lGL7+o6F5hzhLqWfWm1GWHNakIdlqg1ayaTNQ==}
+  /esbuild-freebsd-64/0.15.18:
+    resolution: {integrity: sha512-TT3uBUxkteAjR1QbsmvSsjpKjOX6UkCstr8nMr+q7zi3NuZ1oIpa8U41Y8I8dJH2fJgdC3Dj3CXO5biLQpfdZA==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [freebsd]
     requiresBuild: true
     optional: true
 
-  /esbuild-freebsd-arm64/0.15.16:
-    resolution: {integrity: sha512-8xyiYuGc0DLZphFQIiYaLHlfoP+hAN9RHbE+Ibh8EUcDNHAqbQgUrQg7pE7Bo00rXmQ5Ap6KFgcR0b4ALZls1g==}
+  /esbuild-freebsd-arm64/0.15.18:
+    resolution: {integrity: sha512-R/oVr+X3Tkh+S0+tL41wRMbdWtpWB8hEAMsOXDumSSa6qJR89U0S/PpLXrGF7Wk/JykfpWNokERUpCeHDl47wA==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [freebsd]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-32/0.15.16:
-    resolution: {integrity: sha512-iGijUTV+0kIMyUVoynK0v+32Oi8yyp0xwMzX69GX+5+AniNy/C/AL1MjFTsozRp/3xQPl7jVux/PLe2ds10/2w==}
+  /esbuild-linux-32/0.15.18:
+    resolution: {integrity: sha512-lphF3HiCSYtaa9p1DtXndiQEeQDKPl9eN/XNoBf2amEghugNuqXNZA/ZovthNE2aa4EN43WroO0B85xVSjYkbg==}
     engines: {node: '>=12'}
     cpu: [ia32]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-64/0.15.16:
-    resolution: {integrity: sha512-tuSOjXdLw7VzaUj89fIdAaQT7zFGbKBcz4YxbWrOiXkwscYgE7HtTxUavreBbnRkGxKwr9iT/gmeJWNm4djy/g==}
+  /esbuild-linux-64/0.15.18:
+    resolution: {integrity: sha512-hNSeP97IviD7oxLKFuii5sDPJ+QHeiFTFLoLm7NZQligur8poNOWGIgpQ7Qf8Balb69hptMZzyOBIPtY09GZYw==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-arm/0.15.16:
-    resolution: {integrity: sha512-XKcrxCEXDTOuoRj5l12tJnkvuxXBMKwEC5j0JISw3ziLf0j4zIwXbKbTmUrKFWbo6ZgvNpa7Y5dnbsjVvH39bQ==}
+  /esbuild-linux-arm/0.15.18:
+    resolution: {integrity: sha512-UH779gstRblS4aoS2qpMl3wjg7U0j+ygu3GjIeTonCcN79ZvpPee12Qun3vcdxX+37O5LFxz39XeW2I9bybMVA==}
     engines: {node: '>=12'}
     cpu: [arm]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-arm64/0.15.16:
-    resolution: {integrity: sha512-mPYksnfHnemNrvjrDhZyixL/AfbJN0Xn9S34ZOHYdh6/jJcNd8iTsv3JwJoEvTJqjMggjMhGUPJAdjnFBHoH8A==}
+  /esbuild-linux-arm64/0.15.18:
+    resolution: {integrity: sha512-54qr8kg/6ilcxd+0V3h9rjT4qmjc0CccMVWrjOEM/pEcUzt8X62HfBSeZfT2ECpM7104mk4yfQXkosY8Quptug==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-mips64le/0.15.16:
-    resolution: {integrity: sha512-kSJO2PXaxfm0pWY39+YX+QtpFqyyrcp0ZeI8QPTrcFVQoWEPiPVtOfTZeS3ZKedfH+Ga38c4DSzmKMQJocQv6A==}
+  /esbuild-linux-mips64le/0.15.18:
+    resolution: {integrity: sha512-Mk6Ppwzzz3YbMl/ZZL2P0q1tnYqh/trYZ1VfNP47C31yT0K8t9s7Z077QrDA/guU60tGNp2GOwCQnp+DYv7bxQ==}
     engines: {node: '>=12'}
     cpu: [mips64el]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-ppc64le/0.15.16:
-    resolution: {integrity: sha512-NimPikwkBY0yGABw6SlhKrtT35sU4O23xkhlrTT/O6lSxv3Pm5iSc6OYaqVAHWkLdVf31bF4UDVFO+D990WpAA==}
+  /esbuild-linux-ppc64le/0.15.18:
+    resolution: {integrity: sha512-b0XkN4pL9WUulPTa/VKHx2wLCgvIAbgwABGnKMY19WhKZPT+8BxhZdqz6EgkqCLld7X5qiCY2F/bfpUUlnFZ9w==}
     engines: {node: '>=12'}
     cpu: [ppc64]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-riscv64/0.15.16:
-    resolution: {integrity: sha512-ty2YUHZlwFOwp7pR+J87M4CVrXJIf5ZZtU/umpxgVJBXvWjhziSLEQxvl30SYfUPq0nzeWKBGw5i/DieiHeKfw==}
+  /esbuild-linux-riscv64/0.15.18:
+    resolution: {integrity: sha512-ba2COaoF5wL6VLZWn04k+ACZjZ6NYniMSQStodFKH/Pu6RxzQqzsmjR1t9QC89VYJxBeyVPTaHuBMCejl3O/xg==}
     engines: {node: '>=12'}
     cpu: [riscv64]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-linux-s390x/0.15.16:
-    resolution: {integrity: sha512-VkZaGssvPDQtx4fvVdZ9czezmyWyzpQhEbSNsHZZN0BHvxRLOYAQ7sjay8nMQwYswP6O2KlZluRMNPYefFRs+w==}
+  /esbuild-linux-s390x/0.15.18:
+    resolution: {integrity: sha512-VbpGuXEl5FCs1wDVp93O8UIzl3ZrglgnSQ+Hu79g7hZu6te6/YHgVJxCM2SqfIila0J3k0csfnf8VD2W7u2kzQ==}
     engines: {node: '>=12'}
     cpu: [s390x]
     os: [linux]
     requiresBuild: true
     optional: true
 
-  /esbuild-netbsd-64/0.15.16:
-    resolution: {integrity: sha512-ElQ9rhdY51et6MJTWrCPbqOd/YuPowD7Cxx3ee8wlmXQQVW7UvQI6nSprJ9uVFQISqSF5e5EWpwWqXZsECLvXg==}
+  /esbuild-netbsd-64/0.15.18:
+    resolution: {integrity: sha512-98ukeCdvdX7wr1vUYQzKo4kQ0N2p27H7I11maINv73fVEXt2kyh4K4m9f35U1K43Xc2QGXlzAw0K9yoU7JUjOg==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [netbsd]
     requiresBuild: true
     optional: true
 
-  /esbuild-openbsd-64/0.15.16:
-    resolution: {integrity: sha512-KgxMHyxMCT+NdLQE1zVJEsLSt2QQBAvJfmUGDmgEq8Fvjrf6vSKB00dVHUEDKcJwMID6CdgCpvYNt999tIYhqA==}
+  /esbuild-openbsd-64/0.15.18:
+    resolution: {integrity: sha512-yK5NCcH31Uae076AyQAXeJzt/vxIo9+omZRKj1pauhk3ITuADzuOx5N2fdHrAKPxN+zH3w96uFKlY7yIn490xQ==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [openbsd]
     requiresBuild: true
     optional: true
 
-  /esbuild-sunos-64/0.15.16:
-    resolution: {integrity: sha512-exSAx8Phj7QylXHlMfIyEfNrmqnLxFqLxdQF6MBHPdHAjT7fsKaX6XIJn+aQEFiOcE4X8e7VvdMCJ+WDZxjSRQ==}
+  /esbuild-sunos-64/0.15.18:
+    resolution: {integrity: sha512-On22LLFlBeLNj/YF3FT+cXcyKPEI263nflYlAhz5crxtp3yRG1Ugfr7ITyxmCmjm4vbN/dGrb/B7w7U8yJR9yw==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [sunos]
     requiresBuild: true
     optional: true
 
-  /esbuild-windows-32/0.15.16:
-    resolution: {integrity: sha512-zQgWpY5pUCSTOwqKQ6/vOCJfRssTvxFuEkpB4f2VUGPBpdddZfdj8hbZuFRdZRPIVHvN7juGcpgCA/XCF37mAQ==}
+  /esbuild-windows-32/0.15.18:
+    resolution: {integrity: sha512-o+eyLu2MjVny/nt+E0uPnBxYuJHBvho8vWsC2lV61A7wwTWC3jkN2w36jtA+yv1UgYkHRihPuQsL23hsCYGcOQ==}
     engines: {node: '>=12'}
     cpu: [ia32]
     os: [win32]
     requiresBuild: true
     optional: true
 
-  /esbuild-windows-64/0.15.16:
-    resolution: {integrity: sha512-HjW1hHRLSncnM3MBCP7iquatHVJq9l0S2xxsHHj4yzf4nm9TU4Z7k4NkeMlD/dHQ4jPlQQhwcMvwbJiOefSuZw==}
+  /esbuild-windows-64/0.15.18:
+    resolution: {integrity: sha512-qinug1iTTaIIrCorAUjR0fcBk24fjzEedFYhhispP8Oc7SFvs+XeW3YpAKiKp8dRpizl4YYAhxMjlftAMJiaUw==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [win32]
     requiresBuild: true
     optional: true
 
-  /esbuild-windows-arm64/0.15.16:
-    resolution: {integrity: sha512-oCcUKrJaMn04Vxy9Ekd8x23O8LoU01+4NOkQ2iBToKgnGj5eo1vU9i27NQZ9qC8NFZgnQQZg5oZWAejmbsppNA==}
+  /esbuild-windows-arm64/0.15.18:
+    resolution: {integrity: sha512-q9bsYzegpZcLziq0zgUi5KqGVtfhjxGbnksaBFYmWLxeV/S1fK4OLdq2DFYnXcLMjlZw2L0jLsk1eGoB522WXQ==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [win32]
     requiresBuild: true
     optional: true
 
-  /esbuild/0.15.16:
-    resolution: {integrity: sha512-o6iS9zxdHrrojjlj6pNGC2NAg86ECZqIETswTM5KmJitq+R1YmahhWtMumeQp9lHqJaROGnsBi2RLawGnfo5ZQ==}
+  /esbuild/0.15.18:
+    resolution: {integrity: sha512-x/R72SmW3sSFRm5zrrIjAhCeQSAWoni3CmHEqfQrZIQTM3lVCdehdwuIqaOtfC2slvpdlLa62GYoN8SxT23m6Q==}
     engines: {node: '>=12'}
     hasBin: true
     requiresBuild: true
     optionalDependencies:
-      '@esbuild/android-arm': 0.15.16
-      '@esbuild/linux-loong64': 0.15.16
-      esbuild-android-64: 0.15.16
-      esbuild-android-arm64: 0.15.16
-      esbuild-darwin-64: 0.15.16
-      esbuild-darwin-arm64: 0.15.16
-      esbuild-freebsd-64: 0.15.16
-      esbuild-freebsd-arm64: 0.15.16
-      esbuild-linux-32: 0.15.16
-      esbuild-linux-64: 0.15.16
-      esbuild-linux-arm: 0.15.16
-      esbuild-linux-arm64: 0.15.16
-      esbuild-linux-mips64le: 0.15.16
-      esbuild-linux-ppc64le: 0.15.16
-      esbuild-linux-riscv64: 0.15.16
-      esbuild-linux-s390x: 0.15.16
-      esbuild-netbsd-64: 0.15.16
-      esbuild-openbsd-64: 0.15.16
-      esbuild-sunos-64: 0.15.16
-      esbuild-windows-32: 0.15.16
-      esbuild-windows-64: 0.15.16
-      esbuild-windows-arm64: 0.15.16
+      '@esbuild/android-arm': 0.15.18
+      '@esbuild/linux-loong64': 0.15.18
+      esbuild-android-64: 0.15.18
+      esbuild-android-arm64: 0.15.18
+      esbuild-darwin-64: 0.15.18
+      esbuild-darwin-arm64: 0.15.18
+      esbuild-freebsd-64: 0.15.18
+      esbuild-freebsd-arm64: 0.15.18
+      esbuild-linux-32: 0.15.18
+      esbuild-linux-64: 0.15.18
+      esbuild-linux-arm: 0.15.18
+      esbuild-linux-arm64: 0.15.18
+      esbuild-linux-mips64le: 0.15.18
+      esbuild-linux-ppc64le: 0.15.18
+      esbuild-linux-riscv64: 0.15.18
+      esbuild-linux-s390x: 0.15.18
+      esbuild-netbsd-64: 0.15.18
+      esbuild-openbsd-64: 0.15.18
+      esbuild-sunos-64: 0.15.18
+      esbuild-windows-32: 0.15.18
+      esbuild-windows-64: 0.15.18
+      esbuild-windows-arm64: 0.15.18
 
   /escalade/3.1.1:
     resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
@@ -2595,7 +2629,7 @@ packages:
       eslint: 8.29.0
     dev: true
 
-  /eslint-config-standard/17.0.0_jafpsg2texzosb7zvycotik6am:
+  /eslint-config-standard/17.0.0_wnkmxhw54rcoqx42l6oqxte7qq:
     resolution: {integrity: sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==}
     peerDependencies:
       eslint: ^8.0.1
@@ -2604,8 +2638,8 @@ packages:
       eslint-plugin-promise: ^6.0.0
     dependencies:
       eslint: 8.29.0
-      eslint-plugin-import: 2.26.0_n542pvy4d6vz5nffbpq5koul4e
-      eslint-plugin-n: 15.5.1_eslint@8.29.0
+      eslint-plugin-import: 2.26.0_hmezkefo75s2prddlqllgjxqc4
+      eslint-plugin-n: 15.6.0_eslint@8.29.0
       eslint-plugin-promise: 6.1.1_eslint@8.29.0
     dev: true
 
@@ -2628,7 +2662,7 @@ packages:
       debug: 4.3.4
       enhanced-resolve: 5.12.0
       eslint: 8.29.0
-      eslint-plugin-import: 2.26.0_n542pvy4d6vz5nffbpq5koul4e
+      eslint-plugin-import: 2.26.0_hmezkefo75s2prddlqllgjxqc4
       get-tsconfig: 4.2.0
       globby: 13.1.2
       is-core-module: 2.11.0
@@ -2638,7 +2672,7 @@ packages:
       - supports-color
     dev: true
 
-  /eslint-module-utils/2.7.4_4igdoaky4qu5ssujrvs4x5gk6q:
+  /eslint-module-utils/2.7.4_rnhsyrmqgagohklwa74m5i2wxm:
     resolution: {integrity: sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==}
     engines: {node: '>=4'}
     peerDependencies:
@@ -2659,7 +2693,7 @@ packages:
       eslint-import-resolver-webpack:
         optional: true
     dependencies:
-      '@typescript-eslint/parser': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
+      '@typescript-eslint/parser': 5.46.0_ha6vam6werchizxrnqvarmz2zu
       debug: 3.2.7
       eslint: 8.29.0
       eslint-import-resolver-node: 0.3.6
@@ -2690,7 +2724,7 @@ packages:
       regexpp: 3.2.0
     dev: true
 
-  /eslint-plugin-import/2.26.0_n542pvy4d6vz5nffbpq5koul4e:
+  /eslint-plugin-import/2.26.0_hmezkefo75s2prddlqllgjxqc4:
     resolution: {integrity: sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==}
     engines: {node: '>=4'}
     peerDependencies:
@@ -2700,14 +2734,14 @@ packages:
       '@typescript-eslint/parser':
         optional: true
     dependencies:
-      '@typescript-eslint/parser': 5.45.0_s5ps7njkmjlaqajutnox5ntcla
+      '@typescript-eslint/parser': 5.46.0_ha6vam6werchizxrnqvarmz2zu
       array-includes: 3.1.6
       array.prototype.flat: 1.3.1
       debug: 2.6.9
       doctrine: 2.1.0
       eslint: 8.29.0
       eslint-import-resolver-node: 0.3.6
-      eslint-module-utils: 2.7.4_4igdoaky4qu5ssujrvs4x5gk6q
+      eslint-module-utils: 2.7.4_rnhsyrmqgagohklwa74m5i2wxm
       has: 1.0.3
       is-core-module: 2.11.0
       is-glob: 4.0.3
@@ -2721,8 +2755,8 @@ packages:
       - supports-color
     dev: true
 
-  /eslint-plugin-n/15.5.1_eslint@8.29.0:
-    resolution: {integrity: sha512-kAd+xhZm7brHoFLzKLB7/FGRFJNg/srmv67mqb7tto22rpr4wv/LV6RuXzAfv3jbab7+k1wi42PsIhGviywaaw==}
+  /eslint-plugin-n/15.6.0_eslint@8.29.0:
+    resolution: {integrity: sha512-Hd/F7wz4Mj44Jp0H6Jtty13NcE69GNTY0rVlgTIj1XBnGGVI6UTdDrpE6vqu3AHo07bygq/N+7OH/lgz1emUJw==}
     engines: {node: '>=12.22.0'}
     peerDependencies:
       eslint: '>=7.0.0'
@@ -2753,7 +2787,7 @@ packages:
       semver: 6.3.0
     dev: true
 
-  /eslint-plugin-prettier/4.2.1_nrhoyyjffvfyk4vtlt5destxgm:
+  /eslint-plugin-prettier/4.2.1_5dgjrgoi64tgrv3zzn3walur3u:
     resolution: {integrity: sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==}
     engines: {node: '>=12.0.0'}
     peerDependencies:
@@ -2766,7 +2800,7 @@ packages:
     dependencies:
       eslint: 8.29.0
       eslint-config-prettier: 8.5.0_eslint@8.29.0
-      prettier: 2.8.0
+      prettier: 2.8.1
       prettier-linter-helpers: 1.0.0
     dev: true
 
@@ -2940,12 +2974,6 @@ packages:
     resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==}
     engines: {node: '>=4.0'}
 
-  /estree-walker/0.6.1:
-    resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==}
-
-  /estree-walker/1.0.1:
-    resolution: {integrity: sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==}
-
   /estree-walker/2.0.2:
     resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
 
@@ -2985,13 +3013,13 @@ packages:
       iconv-lite: 0.4.24
       tmp: 0.0.33
 
-  /externality/0.2.2:
-    resolution: {integrity: sha512-seYffJRrRVI3qrCC0asf2mWAvQ/U0jZA+eECylqIxCDHzBs/W+ZeEv3D0bsjNeEewIYZKfELyY96mRactx8C4w==}
+  /externality/1.0.0:
+    resolution: {integrity: sha512-MAU9ci3XdpqOX1aoIoyL2DMzW97P8LYeJxIUkfXhOfsrkH4KLHFaYDwKN0B2l6tqedVJWiTIJtWmxmZfa05vOQ==}
     dependencies:
       enhanced-resolve: 5.12.0
-      mlly: 0.5.17
-      pathe: 0.3.9
-      ufo: 0.8.6
+      mlly: 1.0.0
+      pathe: 1.0.0
+      ufo: 1.0.1
 
   /fast-deep-equal/3.1.3:
     resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
@@ -3129,11 +3157,6 @@ packages:
   /fs-memo/1.2.0:
     resolution: {integrity: sha512-YEexkCpL4j03jn5SxaMHqcO6IuWuqm8JFUYhyCep7Ao89JIYmB8xoKhK7zXXJ9cCaNXpyNH5L3QtAmoxjoHW2w==}
 
-  /fs-minipass/1.2.7:
-    resolution: {integrity: sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==}
-    dependencies:
-      minipass: 2.9.0
-
   /fs-minipass/2.1.0:
     resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==}
     engines: {node: '>= 8'}
@@ -3159,7 +3182,7 @@ packages:
     dependencies:
       call-bind: 1.0.2
       define-properties: 1.1.4
-      es-abstract: 1.20.4
+      es-abstract: 1.20.5
       functions-have-names: 1.2.3
     dev: true
 
@@ -3222,6 +3245,20 @@ packages:
     resolution: {integrity: sha512-X8u8fREiYOE6S8hLbq99PeykTDoLVnxvF4DjWKJmz9xy2nNRdUcV8ZN9tniJFeKyTU3qnC9lL8n4Chd6LmVKHg==}
     dev: true
 
+  /giget/1.0.0:
+    resolution: {integrity: sha512-KWELZn3Nxq5+0So485poHrFriK9Bn3V/x9y+wgqrHkbmnGbjfLmZ685/SVA/ovW+ewoqW0gVI47pI4yW/VNobQ==}
+    hasBin: true
+    dependencies:
+      colorette: 2.0.19
+      defu: 6.1.1
+      https-proxy-agent: 5.0.1
+      mri: 1.2.0
+      node-fetch-native: 1.0.1
+      pathe: 1.0.0
+      tar: 6.1.13
+    transitivePeerDependencies:
+      - supports-color
+
   /git-config-path/2.0.0:
     resolution: {integrity: sha512-qc8h1KIQbJpp+241id3GuAtkdyJ+IK+LIVtkiFTRKRrmddDzs3SI9CvP1QYmWBFvm1I/PWRwj//of8bgAc0ltA==}
     engines: {node: '>=4'}
@@ -3237,13 +3274,6 @@ packages:
     dependencies:
       git-up: 7.0.0
 
-  /gittar/0.1.1:
-    resolution: {integrity: sha512-p+XuqWJpW9ahUuNTptqeFjudFq31o6Jd+maMBarkMAR5U3K9c7zJB4sQ4BV8mIqrTOV29TtqikDhnZfCD4XNfQ==}
-    engines: {node: '>=4'}
-    dependencies:
-      mkdirp: 0.5.6
-      tar: 4.4.19
-
   /glob-parent/5.1.2:
     resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
     engines: {node: '>= 6'}
@@ -3266,6 +3296,16 @@ packages:
       once: 1.4.0
       path-is-absolute: 1.0.1
 
+  /glob/8.0.3:
+    resolution: {integrity: sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==}
+    engines: {node: '>=12'}
+    dependencies:
+      fs.realpath: 1.0.0
+      inflight: 1.0.6
+      inherits: 2.0.4
+      minimatch: 5.1.1
+      once: 1.4.0
+
   /globals/11.12.0:
     resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==}
     engines: {node: '>=4'}
@@ -3306,6 +3346,12 @@ packages:
     resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==}
     dev: true
 
+  /gopd/1.0.1:
+    resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==}
+    dependencies:
+      get-intrinsic: 1.1.3
+    dev: true
+
   /graceful-fs/4.2.10:
     resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==}
 
@@ -3318,13 +3364,22 @@ packages:
     dependencies:
       duplexer: 0.1.2
 
-  /h3/0.7.21:
-    resolution: {integrity: sha512-F/qdr3JKh8zBLiZyiprH5kuzG6vjoTK3nFnIYFUIQPLsw755GI5JezAFc3HJxbgYlzawcGeJlmsw4xu2t/0n/Q==}
+  /h3/0.8.6:
+    resolution: {integrity: sha512-CSWNOKa3QGo67rFU2PhbFTp0uPJtilNji2Z0pMiSRQt3+OkIW0u3E1WMJqIycLqaTgb9JyFqH/S4mcTyyGtvyQ==}
     dependencies:
       cookie-es: 0.5.0
-      destr: 1.2.1
-      radix3: 0.1.2
+      destr: 1.2.2
+      radix3: 0.2.1
       ufo: 0.8.6
+    dev: false
+
+  /h3/1.0.1:
+    resolution: {integrity: sha512-gDCGpRvjchZW2JBlTqbJ9IOs+mdkXXuwSQkSye+jubHAv/UhdamKqoQvd4RFgyBNjHSId8Y+b10UdTcPlP/V+w==}
+    dependencies:
+      cookie-es: 0.5.0
+      destr: 1.2.2
+      radix3: 1.0.0
+      ufo: 1.0.1
 
   /has-bigints/1.0.2:
     resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==}
@@ -3502,7 +3557,7 @@ packages:
     engines: {node: '>=12.0.0'}
     dependencies:
       ansi-escapes: 6.0.0
-      chalk: 5.1.2
+      chalk: 5.2.0
       cli-cursor: 4.0.0
       cli-width: 4.0.0
       external-editor: 3.1.0
@@ -3511,7 +3566,7 @@ packages:
       mute-stream: 0.0.8
       ora: 6.1.2
       run-async: 2.4.1
-      rxjs: 7.5.7
+      rxjs: 7.6.0
       string-width: 5.1.2
       strip-ansi: 7.0.1
       through: 2.3.8
@@ -3738,7 +3793,7 @@ packages:
     resolution: {integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ==}
     engines: {node: '>= 10.13.0'}
     dependencies:
-      '@types/node': 18.11.10
+      '@types/node': 18.11.12
       merge-stream: 2.0.0
       supports-color: 7.2.0
 
@@ -3805,9 +3860,6 @@ packages:
     resolution: {integrity: sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==}
     engines: {node: '>= 8'}
 
-  /knitwork/0.1.3:
-    resolution: {integrity: sha512-f6Mz4kK8k0BAlGZn9Eb7mCUwSyRLoKTLr//u75tyLKm0jgt0ydnI8ubcTPwZjSJredpBZV7ry1EOrNbMJYT0mA==}
-
   /knitwork/1.0.0:
     resolution: {integrity: sha512-dWl0Dbjm6Xm+kDxhPQJsCBTxrJzuGl0aP9rhr+TG8D3l+GL90N8O8lYUi7dTSAN2uuDqCtNgb6aEuQH5wsiV8Q==}
 
@@ -3844,8 +3896,8 @@ packages:
       - supports-color
     dev: false
 
-  /koa/2.13.4:
-    resolution: {integrity: sha512-43zkIKubNbnrULWlHdN5h1g3SEKXOEzoAlRsHOTFpnlDu8JlAOZSMJBLULusuXRequboiwJcj5vtYXKB3k7+2g==}
+  /koa/2.14.1:
+    resolution: {integrity: sha512-USJFyZgi2l0wDgqkfD27gL4YGno7TfUkcmOe6UOLFOVuN+J7FwnNu4Dydl4CUQzraM1lBAiGed0M9OVJoT0Kqw==}
     engines: {node: ^4.8.4 || ^6.10.1 || ^7.10.1 || >= 8.1.4}
     dependencies:
       accepts: 1.3.8
@@ -3901,19 +3953,8 @@ packages:
       uc.micro: 1.0.6
     dev: false
 
-  /listhen/0.2.15:
-    resolution: {integrity: sha512-F/IWj/aJLeokHAIVY+l3JoWRUnbRaf2F0cr+Ybc1YyozMA/yP0C2nf3c0Oi7vAbFvtfiwfWWfP7bIrQc/u5L1A==}
-    dependencies:
-      clipboardy: 3.0.0
-      colorette: 2.0.19
-      defu: 6.1.1
-      get-port-please: 2.6.1
-      http-shutdown: 1.2.2
-      selfsigned: 2.1.1
-      ufo: 0.8.6
-
-  /listhen/0.3.5:
-    resolution: {integrity: sha512-suyt79hNmCFeBIyftcLqLPfYiXeB795gSUWOJT7nspl2IvREY0Q9xvchLhekxvQ0KiOPvWoyALnc9Mxoelm0Pw==}
+  /listhen/1.0.1:
+    resolution: {integrity: sha512-RBzBGHMCc5wP8J5Vf8WgF4CAJH8dWHi9LaKB7vfzZt54CiH/0dp01rudy2hFD9wCrTM+UfxFVnn5wTIiY+Qhiw==}
     dependencies:
       clipboardy: 3.0.0
       colorette: 2.0.19
@@ -3922,7 +3963,7 @@ packages:
       http-shutdown: 1.2.2
       ip-regex: 5.0.0
       node-forge: 1.3.1
-      ufo: 0.8.6
+      ufo: 1.0.1
 
   /loader-utils/2.0.4:
     resolution: {integrity: sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==}
@@ -4008,7 +4049,7 @@ packages:
     resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==}
     engines: {node: '>=12'}
     dependencies:
-      chalk: 5.1.2
+      chalk: 5.2.0
       is-unicode-supported: 1.3.0
 
   /loupe/2.3.6:
@@ -4149,22 +4190,17 @@ packages:
   /minimist/1.2.7:
     resolution: {integrity: sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==}
 
-  /minipass/2.9.0:
-    resolution: {integrity: sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==}
-    dependencies:
-      safe-buffer: 5.2.1
-      yallist: 3.1.1
-
   /minipass/3.3.6:
     resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==}
     engines: {node: '>=8'}
     dependencies:
       yallist: 4.0.0
 
-  /minizlib/1.3.3:
-    resolution: {integrity: sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==}
+  /minipass/4.0.0:
+    resolution: {integrity: sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==}
+    engines: {node: '>=8'}
     dependencies:
-      minipass: 2.9.0
+      yallist: 4.0.0
 
   /minizlib/2.1.2:
     resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==}
@@ -4173,25 +4209,22 @@ packages:
       minipass: 3.3.6
       yallist: 4.0.0
 
+  /mkdir/0.0.2:
+    resolution: {integrity: sha512-98OnjcWaNEIRUJJe9rFoWlbkQ5n9z8F86wIPCrI961YEViiVybTuJln919WuuSHSnlrqXy0ELKCntoPy8C7lqg==}
+    engines: {node: '>=0.4.0'}
+
   /mkdirp/0.5.6:
     resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==}
     hasBin: true
     dependencies:
       minimist: 1.2.7
+    dev: false
 
   /mkdirp/1.0.4:
     resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
     engines: {node: '>=10'}
     hasBin: true
 
-  /mlly/0.5.17:
-    resolution: {integrity: sha512-Rn+ai4G+CQXptDFSRNnChEgNr+xAEauYhwRvpPl/UHStTlgkIftplgJRsA2OXPuoUn86K4XAjB26+x5CEvVb6A==}
-    dependencies:
-      acorn: 8.8.1
-      pathe: 1.0.0
-      pkg-types: 1.0.1
-      ufo: 1.0.1
-
   /mlly/1.0.0:
     resolution: {integrity: sha512-QL108Hwt+u9bXdWgOI0dhzZfACovn5Aen4Xvc8Jasd9ouRH4NjnrXEiyP3nVvJo91zPlYjVRckta0Nt2zfoR6g==}
     dependencies:
@@ -4238,69 +4271,69 @@ packages:
     engines: {node: '>= 0.6'}
     dev: false
 
-  /nitropack/0.5.4:
-    resolution: {integrity: sha512-e7hNguDQLDTV5271U1PgWFC/B3HscZ6W8DG9bHfuwmiWLXRrdsMvw27yJdf6MGbqQ+p6o22ligpfIL1M54rSTg==}
-    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0}
+  /nitropack/1.0.0:
+    resolution: {integrity: sha512-788lHgNgC+NKqecwFgMkAQTuTXwuh2hEgOk2sLwV3qPVUogxrl6P3m5eKdt6Mtzx+mlXIw0G/P90B5TNWEqDSQ==}
+    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     hasBin: true
     dependencies:
       '@cloudflare/kv-asset-handler': 0.2.0
       '@netlify/functions': 1.3.0
-      '@rollup/plugin-alias': 3.1.9_rollup@2.79.1
-      '@rollup/plugin-commonjs': 22.0.2_rollup@2.79.1
-      '@rollup/plugin-inject': 4.0.4_rollup@2.79.1
-      '@rollup/plugin-json': 4.1.0_rollup@2.79.1
-      '@rollup/plugin-node-resolve': 14.1.0_rollup@2.79.1
-      '@rollup/plugin-replace': 4.0.0_rollup@2.79.1
-      '@rollup/plugin-wasm': 5.2.0_rollup@2.79.1
-      '@rollup/pluginutils': 4.2.1
-      '@vercel/nft': 0.22.1
+      '@rollup/plugin-alias': 4.0.2_rollup@2.79.1
+      '@rollup/plugin-commonjs': 23.0.4_rollup@2.79.1
+      '@rollup/plugin-inject': 5.0.2_rollup@2.79.1
+      '@rollup/plugin-json': 5.0.2_rollup@2.79.1
+      '@rollup/plugin-node-resolve': 15.0.1_rollup@2.79.1
+      '@rollup/plugin-replace': 5.0.1_rollup@2.79.1
+      '@rollup/plugin-wasm': 6.0.1_rollup@2.79.1
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
+      '@vercel/nft': 0.22.5
       archiver: 5.3.1
-      c12: 0.2.13
-      chalk: 5.1.2
+      c12: 1.1.0
+      chalk: 5.2.0
       chokidar: 3.5.3
       consola: 2.15.3
       cookie-es: 0.5.0
       defu: 6.1.1
-      destr: 1.2.1
+      destr: 1.2.2
       dot-prop: 7.2.0
-      esbuild: 0.15.16
+      esbuild: 0.15.18
       escape-string-regexp: 5.0.0
       etag: 1.8.1
       fs-extra: 10.1.0
       globby: 13.1.2
       gzip-size: 7.0.0
-      h3: 0.7.21
+      h3: 1.0.1
       hookable: 5.4.2
       http-proxy: 1.18.1
       is-primitive: 3.0.1
       jiti: 1.16.0
       klona: 2.0.5
-      knitwork: 0.1.3
-      listhen: 0.3.5
+      knitwork: 1.0.0
+      listhen: 1.0.1
       mime: 3.0.0
-      mlly: 0.5.17
+      mlly: 1.0.0
       mri: 1.2.0
-      node-fetch-native: 0.1.8
-      ohash: 0.1.5
-      ohmyfetch: 0.4.21
-      pathe: 0.3.9
+      node-fetch-native: 1.0.1
+      ofetch: 1.0.0
+      ohash: 1.0.0
+      pathe: 1.0.0
       perfect-debounce: 0.1.3
-      pkg-types: 0.3.6
+      pkg-types: 1.0.1
       pretty-bytes: 6.0.0
-      radix3: 0.1.2
+      radix3: 1.0.0
       rollup: 2.79.1
       rollup-plugin-terser: 7.0.2_rollup@2.79.1
       rollup-plugin-visualizer: 5.8.3_rollup@2.79.1
-      scule: 0.3.2
+      scule: 1.0.0
       semver: 7.3.8
       serve-placeholder: 2.0.1
       serve-static: 1.15.0
       source-map-support: 0.5.21
       std-env: 3.3.1
-      ufo: 0.8.6
-      unenv: 0.6.2
-      unimport: 0.6.8
-      unstorage: 0.5.6
+      ufo: 1.0.1
+      unenv: 1.0.0
+      unimport: 1.0.1_rollup@2.79.1
+      unstorage: 1.0.1
     transitivePeerDependencies:
       - bufferutil
       - debug
@@ -4315,6 +4348,9 @@ packages:
   /node-fetch-native/0.1.8:
     resolution: {integrity: sha512-ZNaury9r0NxaT2oL65GvdGDy+5PlSaHTovT6JV5tOW07k1TQmgC0olZETa4C9KZg0+6zBr99ctTYa3Utqj9P/Q==}
 
+  /node-fetch-native/1.0.1:
+    resolution: {integrity: sha512-VzW+TAk2wE4X9maiKMlT+GsPU4OMmR1U9CrHSmd3DFLn2IcZ9VJ6M6BBugGfYUnPCLSYxXdZy17M0BEJyhUTwg==}
+
   /node-fetch/2.6.7:
     resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==}
     engines: {node: 4.x || >=6.0.0}
@@ -4392,60 +4428,64 @@ packages:
     dependencies:
       boolbase: 1.0.0
 
-  /nuxi/3.0.0-rc.11:
-    resolution: {integrity: sha512-Zz3FRkLX0pmrQAgNkiartayC5DHKBxuMsPqTkaWSXD123CtFanL2mTOwfWtuO6W+qkEA9DGWNOL+fOkfScOQJQ==}
-    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0}
+  /nuxi/3.0.0:
+    resolution: {integrity: sha512-VWh1kKFffxD2yadZWcQSd6eTf9okXRr7d3HsjLiI4B3Q1/8iKdIUiodGo7X71OZ+gPVnX6Oh/XFzcb7mr+8TbQ==}
+    engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     hasBin: true
     optionalDependencies:
       fsevents: 2.3.2
 
-  /nuxt/3.0.0-rc.11_s5ps7njkmjlaqajutnox5ntcla:
-    resolution: {integrity: sha512-I0wyxPHnUoJBWoROKUx91PLKaAFZ/TsxSpcm3/jn/Ysq2RGU5Q3o9AzqT0YcXW4rgH35QPFvGpqopU9X0vS7Qw==}
-    engines: {node: ^14.16.0 || ^16.11.0 || ^17.0.0 || ^18.0.0}
+  /nuxt/3.0.0_ha6vam6werchizxrnqvarmz2zu:
+    resolution: {integrity: sha512-RNlD78uv04ZiXWmlx9f1tnJfrqsYAWHU+4gbgOTQpIBmQzHWPWiox+fm/1m93iKfEd5sJi9TJUoXX5yBObVZYw==}
+    engines: {node: ^14.16.0 || ^16.10.0 || ^17.0.0 || ^18.0.0 || ^19.0.0}
     hasBin: true
     dependencies:
       '@nuxt/devalue': 2.0.0
-      '@nuxt/kit': 3.0.0-rc.11
-      '@nuxt/schema': 3.0.0-rc.11
+      '@nuxt/kit': 3.0.0
+      '@nuxt/schema': 3.0.0
       '@nuxt/telemetry': 2.1.8
-      '@nuxt/ui-templates': 0.4.0
-      '@nuxt/vite-builder': 3.0.0-rc.11_5akckbu4tmbn6phmzmqezegkrq
+      '@nuxt/ui-templates': 1.0.0
+      '@nuxt/vite-builder': 3.0.0_rsibta3vmmagu2awyzif4pq2lq
+      '@unhead/ssr': 1.0.13
       '@vue/reactivity': 3.2.45
       '@vue/shared': 3.2.45
-      '@vueuse/head': 0.7.13_vue@3.2.45
+      '@vueuse/head': 1.0.22_vue@3.2.45
       chokidar: 3.5.3
       cookie-es: 0.5.0
       defu: 6.1.1
-      destr: 1.2.1
+      destr: 1.2.2
       escape-string-regexp: 5.0.0
+      estree-walker: 3.0.1
       fs-extra: 10.1.0
       globby: 13.1.2
-      h3: 0.7.21
+      h3: 1.0.1
       hash-sum: 2.0.0
       hookable: 5.4.2
-      knitwork: 0.1.3
+      knitwork: 1.0.0
       magic-string: 0.26.7
-      mlly: 0.5.17
-      nitropack: 0.5.4
-      nuxi: 3.0.0-rc.11
-      ohash: 0.1.5
-      ohmyfetch: 0.4.21
-      pathe: 0.3.9
+      mlly: 1.0.0
+      nitropack: 1.0.0
+      nuxi: 3.0.0
+      ofetch: 1.0.0
+      ohash: 1.0.0
+      pathe: 1.0.0
       perfect-debounce: 0.1.3
-      scule: 0.3.2
-      strip-literal: 0.4.2
-      ufo: 0.8.6
+      scule: 1.0.0
+      strip-literal: 1.0.0
+      ufo: 1.0.1
+      ultrahtml: 1.0.4
       unctx: 2.1.1
-      unenv: 0.6.2
-      unimport: 0.6.8
-      unplugin: 0.9.6
-      untyped: 0.5.0
+      unenv: 1.0.0
+      unhead: 1.0.13
+      unimport: 1.0.1
+      unplugin: 1.0.0
+      untyped: 1.0.0
       vue: 3.2.45
-      vue-bundle-renderer: 0.4.4
+      vue-bundle-renderer: 1.0.0
       vue-devtools-stub: 0.1.0
       vue-router: 4.1.6_vue@3.2.45
     transitivePeerDependencies:
-      - '@vue/composition-api'
+      - '@types/node'
       - bufferutil
       - debug
       - encoding
@@ -4454,6 +4494,7 @@ packages:
       - rollup
       - sass
       - stylus
+      - sugarss
       - supports-color
       - terser
       - typescript
@@ -4495,19 +4536,26 @@ packages:
     dependencies:
       call-bind: 1.0.2
       define-properties: 1.1.4
-      es-abstract: 1.20.4
+      es-abstract: 1.20.5
     dev: true
 
-  /ohash/0.1.5:
-    resolution: {integrity: sha512-qynly1AFIpGWEAW88p6DhMNqok/Swb52/KsiU+Toi7er058Ptvno3tkfTML6wYcEgFgp2GsUziW4Nqn62ciuyw==}
+  /ofetch/1.0.0:
+    resolution: {integrity: sha512-d40aof8czZFSQKJa4+F7Ch3UC5D631cK1TTUoK+iNEut9NoiCL+u0vykl/puYVUS2df4tIQl5upQcolIcEzQjQ==}
+    dependencies:
+      destr: 1.2.2
+      node-fetch-native: 1.0.1
+      ufo: 1.0.1
+
+  /ohash/1.0.0:
+    resolution: {integrity: sha512-kxSyzq6tt+6EE/xCnD1XaFhCCjUNUaz3X30rJp6mnjGLXAAvuPFqohMdv0aScWzajR45C29HyBaXZ8jXBwnh9A==}
 
   /ohmyfetch/0.4.21:
     resolution: {integrity: sha512-VG7f/JRvqvBOYvL0tHyEIEG7XHWm7OqIfAs6/HqwWwDfjiJ1g0huIpe5sFEmyb+7hpFa1EGNH2aERWR72tlClw==}
     dependencies:
-      destr: 1.2.1
+      destr: 1.2.2
       node-fetch-native: 0.1.8
       ufo: 0.8.6
-      undici: 5.13.0
+      undici: 5.14.0
 
   /on-finished/2.4.1:
     resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
@@ -4562,7 +4610,7 @@ packages:
     engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
     dependencies:
       bl: 5.1.0
-      chalk: 5.1.2
+      chalk: 5.2.0
       cli-cursor: 4.0.0
       cli-spinners: 2.7.0
       is-interactive: 2.0.0
@@ -4675,9 +4723,6 @@ packages:
   /pathe/0.2.0:
     resolution: {integrity: sha512-sTitTPYnn23esFR3RlqYBWn4c45WGeLcsKzQiUpXJAyfcWkolvlYpV8FLo7JishK946oQwMFUCHXQ9AjGPKExw==}
 
-  /pathe/0.3.9:
-    resolution: {integrity: sha512-6Y6s0vT112P3jD8dGfuS6r+lpa0qqNrLyHPOwvXMnyNTQaYiwgau2DP3aNDsR13xqtGj7rrPo+jFUATpU6/s+g==}
-
   /pathe/1.0.0:
     resolution: {integrity: sha512-nPdMG0Pd09HuSsr7QOKUXO2Jr9eqaDiZvDwdyIhNG5SHYujkQHYKDfGQkulBxvbDHz8oHLsTgKN86LSwYzSHAg==}
 
@@ -4699,8 +4744,8 @@ packages:
     resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==}
     engines: {node: '>=0.10.0'}
 
-  /pinia/2.0.27_mgnvym7yiazkylwwogi5r767ue:
-    resolution: {integrity: sha512-nOnXP0OFeL8R4WjAHsterU+11vptda643gH02xKNtSCDPiRzVfRYodOLihLDoa0gL1KKuQKV+KOzEgdt3YvqEw==}
+  /pinia/2.0.28_prq2uz4lho2pwp6irk4cfkrxwu:
+    resolution: {integrity: sha512-YClq9DkqCblq9rlyUual7ezMu/iICWdBtfJrDt4oWU9Zxpijyz7xB2xTwx57DaBQ96UGvvTMORzALr+iO5PVMw==}
     peerDependencies:
       '@vue/composition-api': ^1.4.0
       typescript: '>=4.4.4'
@@ -4712,18 +4757,11 @@ packages:
         optional: true
     dependencies:
       '@vue/devtools-api': 6.4.5
-      typescript: 4.9.3
+      typescript: 4.9.4
       vue: 3.2.45
       vue-demi: 0.13.11_vue@3.2.45
     dev: false
 
-  /pkg-types/0.3.6:
-    resolution: {integrity: sha512-uQZutkkh6axl1GxDm5/+8ivVdwuJ5pyDGqJeSiIWIUWIqYiK3p9QKozN/Rv6eVvFoeSWkN1uoYeSDBwwBJBtbg==}
-    dependencies:
-      jsonc-parser: 3.2.0
-      mlly: 0.5.17
-      pathe: 0.3.9
-
   /pkg-types/1.0.1:
     resolution: {integrity: sha512-jHv9HB+Ho7dj6ItwppRDDl0iZRYBD0jsakHXtFgoLr+cHSF6xC+QL54sJmWxyGxOLYSHm0afhXhXcQDQqH9z8g==}
     dependencies:
@@ -4849,8 +4887,8 @@ packages:
       resolve: 1.22.1
     dev: false
 
-  /postcss-import/15.0.1_postcss@8.4.19:
-    resolution: {integrity: sha512-UGlvk8EgT7Gm/Ndf9xZHnzr8xm8P54N8CBWLtcY5alP+YxlEge/Rv78etQyevZs3qWTE9If13+Bo6zATBrPOpA==}
+  /postcss-import/15.1.0_postcss@8.4.19:
+    resolution: {integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==}
     engines: {node: '>=14.0.0'}
     peerDependencies:
       postcss: ^8.0.0
@@ -5205,8 +5243,8 @@ packages:
       fast-diff: 1.2.0
     dev: true
 
-  /prettier/2.8.0:
-    resolution: {integrity: sha512-9Lmg8hTFZKG0Asr/kW9Bp8tJjRVluO8EJQVfY2T7FMw9T5jy4I/Uvx0Rca/XWf50QQ1/SS48+6IJWnrb+2yemA==}
+  /prettier/2.8.1:
+    resolution: {integrity: sha512-lqGoSJBQNJidqCHE80vqZJHWHRFoNYsSpP9AjFhlhi9ODCJA541svILes/+/1GM3VaL/abZi7cpFzOpdR9UPKg==}
     engines: {node: '>=10.13.0'}
     hasBin: true
     dev: true
@@ -5236,8 +5274,12 @@ packages:
     engines: {node: '>=10'}
     dev: false
 
-  /radix3/0.1.2:
-    resolution: {integrity: sha512-Mpfd/OuX0zoJ6ojLD/RTOHvJPg6e6PjINtmYzV87kIXc5iUtDz34i7gg4SV4XjqRJTmSiYO/g9i/mKWGf4z8wg==}
+  /radix3/0.2.1:
+    resolution: {integrity: sha512-FnhArTl5Tq7dodiLeSPKrDUyCQuJqEncP8cKdyy399g8F/cz7GH6FmzA3Rkosu2IZMkpswFFwXfb2ERSiL06pg==}
+    dev: false
+
+  /radix3/1.0.0:
+    resolution: {integrity: sha512-6n3AEXth91ASapMVKiEh2wrbFJmI+NBilrWE0AbiGgfm0xet0QXC8+a3K19r1UVYjUjctUgB053c3V/J6V0kCQ==}
 
   /randombytes/2.1.0:
     resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}
@@ -5248,18 +5290,11 @@ packages:
     resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
     engines: {node: '>= 0.6'}
 
-  /rc9/1.2.4:
-    resolution: {integrity: sha512-YD1oJO9LUzMdmr2sAsVlwQVtEoDCmvuyDwmSWrg2GKFprl3BckP5cmw9rHPunei0lV6Xl4E5t2esT+0trY1xfQ==}
-    dependencies:
-      defu: 6.1.1
-      destr: 1.2.1
-      flat: 5.0.2
-
   /rc9/2.0.0:
     resolution: {integrity: sha512-yVeYJHOpJLOhs3V6RKwz7RPPwPurrx3JjwK264sPgvo/lFdhuUrLien7iSvAO6STVkN0gSMk/MehQNHQhflqZw==}
     dependencies:
       defu: 6.1.1
-      destr: 1.2.1
+      destr: 1.2.2
       flat: 5.0.2
 
   /read-cache/1.0.0:
@@ -5429,18 +5464,6 @@ packages:
       source-map: 0.7.4
       yargs: 17.6.2
 
-  /rollup-pluginutils/2.8.2:
-    resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==}
-    dependencies:
-      estree-walker: 0.6.1
-
-  /rollup/2.78.1:
-    resolution: {integrity: sha512-VeeCgtGi4P+o9hIg+xz4qQpRl6R401LWEXBmxYKOV4zlF82lyhgh2hTZnheFUbANE8l2A41F458iwj2vEYaXJg==}
-    engines: {node: '>=10.0.0'}
-    hasBin: true
-    optionalDependencies:
-      fsevents: 2.3.2
-
   /rollup/2.79.1:
     resolution: {integrity: sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==}
     engines: {node: '>=10.0.0'}
@@ -5457,8 +5480,8 @@ packages:
     dependencies:
       queue-microtask: 1.2.3
 
-  /rxjs/7.5.7:
-    resolution: {integrity: sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA==}
+  /rxjs/7.6.0:
+    resolution: {integrity: sha512-DDa7d8TFNUalGC9VqXvQ1euWNN7sc63TrUCuM9J998+ViviahMIjKSOU7rfcgFOF+FCD71BhDRv4hrFz+ImDLQ==}
     dependencies:
       tslib: 2.4.1
 
@@ -5494,18 +5517,9 @@ packages:
       ajv-keywords: 3.5.2_ajv@6.12.6
     dev: false
 
-  /scule/0.3.2:
-    resolution: {integrity: sha512-zIvPdjOH8fv8CgrPT5eqtxHQXmPNnV/vHJYffZhE43KZkvULvpCTvOt1HPlFaCZx287INL9qaqrZg34e8NgI4g==}
-
   /scule/1.0.0:
     resolution: {integrity: sha512-4AsO/FrViE/iDNEPaAQlb77tf0csuq27EsVpy6ett584EcRTp6pTDLoGWVxCD77y5iU5FauOvhsI4o1APwPoSQ==}
 
-  /selfsigned/2.1.1:
-    resolution: {integrity: sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ==}
-    engines: {node: '>=10'}
-    dependencies:
-      node-forge: 1.3.1
-
   /semver/5.7.1:
     resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==}
     hasBin: true
@@ -5603,6 +5617,7 @@ packages:
   /slash/3.0.0:
     resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
     engines: {node: '>=8'}
+    dev: true
 
   /slash/4.0.0:
     resolution: {integrity: sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==}
@@ -5628,6 +5643,7 @@ packages:
 
   /sourcemap-codec/1.4.8:
     resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==}
+    deprecated: Please use @jridgewell/sourcemap-codec instead
 
   /spdx-correct/3.1.1:
     resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==}
@@ -5695,7 +5711,7 @@ packages:
     dependencies:
       call-bind: 1.0.2
       define-properties: 1.1.4
-      es-abstract: 1.20.4
+      es-abstract: 1.20.5
     dev: true
 
   /string.prototype.trimstart/1.0.6:
@@ -5703,7 +5719,7 @@ packages:
     dependencies:
       call-bind: 1.0.2
       define-properties: 1.1.4
-      es-abstract: 1.20.4
+      es-abstract: 1.20.5
     dev: true
 
   /string_decoder/1.1.1:
@@ -5748,11 +5764,6 @@ packages:
     resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
     engines: {node: '>=8'}
 
-  /strip-literal/0.4.2:
-    resolution: {integrity: sha512-pv48ybn4iE1O9RLgCAN0iU4Xv7RlBTiit6DKmMiErbs9x1wH6vXBs45tWc0H5wUIF6TLTrKweqkmYF/iraQKNw==}
-    dependencies:
-      acorn: 8.8.1
-
   /strip-literal/1.0.0:
     resolution: {integrity: sha512-5o4LsH1lzBzO9UFH63AJ2ad2/S2AVx6NtjOcaz+VTT2h1RiRvbipW72z8M/lxEhcPHDBQwpDrnTF7sXy/7OwCQ==}
     dependencies:
@@ -5818,7 +5829,7 @@ packages:
       '@koa/router': 9.4.0
       commander: 6.2.1
       fs-extra: 9.1.0
-      koa: 2.13.4
+      koa: 2.14.1
       koa-static: 5.0.0
       open: 7.4.2
       portfinder: 1.0.32
@@ -5880,25 +5891,13 @@ packages:
       inherits: 2.0.4
       readable-stream: 3.6.0
 
-  /tar/4.4.19:
-    resolution: {integrity: sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==}
-    engines: {node: '>=4.5'}
-    dependencies:
-      chownr: 1.1.4
-      fs-minipass: 1.2.7
-      minipass: 2.9.0
-      minizlib: 1.3.3
-      mkdirp: 0.5.6
-      safe-buffer: 5.2.1
-      yallist: 3.1.1
-
-  /tar/6.1.12:
-    resolution: {integrity: sha512-jU4TdemS31uABHd+Lt5WEYJuzn+TJTCBLljvIAHZOz6M9Os5pJ4dD+vRFLxPa/n3T0iEFzpi+0x1UfuDZYbRMw==}
+  /tar/6.1.13:
+    resolution: {integrity: sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==}
     engines: {node: '>=10'}
     dependencies:
       chownr: 2.0.0
       fs-minipass: 2.1.0
-      minipass: 3.3.6
+      minipass: 4.0.0
       minizlib: 2.1.2
       mkdirp: 1.0.4
       yallist: 4.0.0
@@ -5983,14 +5982,14 @@ packages:
     engines: {node: '>=0.6.x'}
     dev: false
 
-  /tsutils/3.21.0_typescript@4.9.3:
+  /tsutils/3.21.0_typescript@4.9.4:
     resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==}
     engines: {node: '>= 6'}
     peerDependencies:
       typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta'
     dependencies:
       tslib: 1.14.1
-      typescript: 4.9.3
+      typescript: 4.9.4
     dev: true
 
   /type-check/0.4.0:
@@ -6038,8 +6037,8 @@ packages:
       mime-types: 2.1.35
     dev: false
 
-  /typescript/4.9.3:
-    resolution: {integrity: sha512-CIfGzTelbKNEnLpLdGFgdyKhG23CKdKgQPOBc+OUNrkJ2vr+KSzsSV5kq5iWhEQbok+quxgGzrAtGWCyU7tHnA==}
+  /typescript/4.9.4:
+    resolution: {integrity: sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==}
     engines: {node: '>=4.2.0'}
     hasBin: true
 
@@ -6053,6 +6052,9 @@ packages:
   /ufo/1.0.1:
     resolution: {integrity: sha512-boAm74ubXHY7KJQZLlXrtMz52qFvpsbOxDcZOnw/Wf+LS4Mmyu7JxmzD4tDLtUQtmZECypJ0FrCz4QIe6dvKRA==}
 
+  /ultrahtml/1.0.4:
+    resolution: {integrity: sha512-sso5lk1F1/Q1crKx0+9/9/rHCykRJFSifYLaShnhgzfJhbEDZdpntClBs7ojhx0lRmQlUtDIxrC/8hBJj5bVPw==}
+
   /unbox-primitive/1.0.2:
     resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==}
     dependencies:
@@ -6070,33 +6072,26 @@ packages:
       magic-string: 0.26.7
       unplugin: 1.0.0
 
-  /undici/5.13.0:
-    resolution: {integrity: sha512-UDZKtwb2k7KRsK4SdXWG7ErXiL7yTGgLWvk2AXO1JMjgjh404nFo6tWSCM2xMpJwMPx3J8i/vfqEh1zOqvj82Q==}
+  /undici/5.14.0:
+    resolution: {integrity: sha512-yJlHYw6yXPPsuOH0x2Ib1Km61vu4hLiRRQoafs+WUgX1vO64vgnxiCEN9dpIrhZyHFsai3F0AEj4P9zy19enEQ==}
     engines: {node: '>=12.18'}
     dependencies:
       busboy: 1.6.0
 
-  /unenv/0.6.2:
-    resolution: {integrity: sha512-IdQfYsHsGKDkiBdeOmtU4MjWvPYfMDOC63cvFqZPodAc5aVezvfD9Bwr7FL/G78cAMMCaDm5Jux3vYo+Z8c/Dg==}
+  /unenv/1.0.0:
+    resolution: {integrity: sha512-vlyi2Rzj4CNlA1JsEXufX+ItkGr3Z5DfLzKniYEneMlBVtuxS+57f1LwTPj2eiBPSPaGHMUVzEnjSCGE7l8JQg==}
     dependencies:
       defu: 6.1.1
       mime: 3.0.0
-      node-fetch-native: 0.1.8
-      pathe: 0.3.9
+      node-fetch-native: 1.0.1
+      pathe: 1.0.0
 
-  /unimport/0.6.8:
-    resolution: {integrity: sha512-MWkaPYvN0j+6jfEuiVFhfmy+aOtgAP11CozSbu/I3Cx+8ybjXIueB7GVlKofHabtjzSlPeAvWKJSFjHWsG2JaA==}
+  /unhead/1.0.13:
+    resolution: {integrity: sha512-stWC9VawHWq27WiAsgNPLFXI61LaNy1E3Zs/0cSgPTvz4ti8fYuqLOz930pzVRIKrWnxQVGndw8UZLSEcK7ikA==}
     dependencies:
-      '@rollup/pluginutils': 4.2.1
-      escape-string-regexp: 5.0.0
-      fast-glob: 3.2.12
-      local-pkg: 0.4.2
-      magic-string: 0.26.7
-      mlly: 0.5.17
-      pathe: 0.3.9
-      scule: 0.3.2
-      strip-literal: 0.4.2
-      unplugin: 0.9.6
+      '@unhead/dom': 1.0.13
+      '@unhead/schema': 1.0.13
+      hookable: 5.4.2
 
   /unimport/1.0.1:
     resolution: {integrity: sha512-SEPKl3uyqUvi6c0MnyCmUF9H07CuC9j9p2p33F03LmegU0sxjpnjL0fLKAhh7BTfcKaJKj+1iOiAFtg7P3m5mQ==}
@@ -6115,18 +6110,27 @@ packages:
     transitivePeerDependencies:
       - rollup
 
+  /unimport/1.0.1_rollup@2.79.1:
+    resolution: {integrity: sha512-SEPKl3uyqUvi6c0MnyCmUF9H07CuC9j9p2p33F03LmegU0sxjpnjL0fLKAhh7BTfcKaJKj+1iOiAFtg7P3m5mQ==}
+    dependencies:
+      '@rollup/pluginutils': 5.0.2_rollup@2.79.1
+      escape-string-regexp: 5.0.0
+      fast-glob: 3.2.12
+      local-pkg: 0.4.2
+      magic-string: 0.26.7
+      mlly: 1.0.0
+      pathe: 1.0.0
+      pkg-types: 1.0.1
+      scule: 1.0.0
+      strip-literal: 1.0.0
+      unplugin: 1.0.0
+    transitivePeerDependencies:
+      - rollup
+
   /universalify/2.0.0:
     resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
     engines: {node: '>= 10.0.0'}
 
-  /unplugin/0.9.6:
-    resolution: {integrity: sha512-YYLtfoNiie/lxswy1GOsKXgnLJTE27la/PeCGznSItk+8METYZErO+zzV9KQ/hXhPwzIJsfJ4s0m1Rl7ZCWZ4Q==}
-    dependencies:
-      acorn: 8.8.1
-      chokidar: 3.5.3
-      webpack-sources: 3.2.3
-      webpack-virtual-modules: 0.4.6
-
   /unplugin/1.0.0:
     resolution: {integrity: sha512-H5UnBUxfhTXBXGo2AwKsl0UaLSHzSNDZNehPQSgdhVfO/t+XAS1Yoj3vmLrrlBrS9ZwtH5tejbX/TCp5DcyCKg==}
     dependencies:
@@ -6135,34 +6139,25 @@ packages:
       webpack-sources: 3.2.3
       webpack-virtual-modules: 0.4.6
 
-  /unstorage/0.5.6:
-    resolution: {integrity: sha512-TUm1ZyLkVamRfM+uWmWtavlzri3XS0ajYXKhlrAZ8aCChMwH29lufOfAP0bsMaBHuciIVfycaGgNhHeyLONpdA==}
+  /unstorage/1.0.1:
+    resolution: {integrity: sha512-J1c4b8K2KeihHrQtdgl/ybIapArUbPaPb+TyJy/nGSauDwDYqciZsEKdkee568P3c8SSH4TIgnGRHDWMPGw+Lg==}
     dependencies:
       anymatch: 3.1.3
       chokidar: 3.5.3
-      destr: 1.2.1
-      h3: 0.7.21
+      destr: 1.2.2
+      h3: 1.0.1
       ioredis: 5.2.4
-      listhen: 0.2.15
+      listhen: 1.0.1
+      mkdir: 0.0.2
       mri: 1.2.0
-      ohmyfetch: 0.4.21
-      ufo: 0.8.6
+      ofetch: 1.0.0
+      ufo: 1.0.1
       ws: 8.11.0
     transitivePeerDependencies:
       - bufferutil
       - supports-color
       - utf-8-validate
 
-  /untyped/0.5.0:
-    resolution: {integrity: sha512-2Sre5A1a7G61bjaAKZnSFaVgbJMwwbbYQpJFH69hAYcDfN7kIaktlSphS02XJilz4+/jR1tsJ5MHo1oMoCezxg==}
-    dependencies:
-      '@babel/core': 7.20.5
-      '@babel/standalone': 7.20.6
-      '@babel/types': 7.20.5
-      scule: 0.3.2
-    transitivePeerDependencies:
-      - supports-color
-
   /untyped/1.0.0:
     resolution: {integrity: sha512-aBeR3Z51038d7zVzsNShYEdO7u/VCp5R17fxpPXlD2QvG9g6uVJ+JM+zMJ7KFPIt1BNf3I6bU6PhAlsAFkIfdA==}
     dependencies:
@@ -6203,23 +6198,27 @@ packages:
     engines: {node: '>= 0.8'}
     dev: false
 
-  /vite-node/0.23.4:
-    resolution: {integrity: sha512-8VuDGwTWIvwPYcbw8ZycMlwAwqCmqZfLdFrDK75+o+6bWYpede58k6AAXN9ioU+icW82V4u1MzkxLVhhIoQ9xA==}
+  /vite-node/0.25.6:
+    resolution: {integrity: sha512-xwmZ4lVpqfKTCKZRt4vJflGIA4kEsClfGSWZijNqyORnAl1EvL/8USLGEHADe/NSjkwiEJoVQvZu0JQXpo+rQA==}
     engines: {node: '>=v14.16.0'}
     hasBin: true
     dependencies:
       debug: 4.3.4
-      mlly: 0.5.17
+      mlly: 1.0.0
       pathe: 0.2.0
-      vite: 3.1.8
+      source-map: 0.6.1
+      source-map-support: 0.5.21
+      vite: 3.2.5
     transitivePeerDependencies:
+      - '@types/node'
       - less
       - sass
       - stylus
+      - sugarss
       - supports-color
       - terser
 
-  /vite-plugin-checker/0.5.1_aa2q4hyfpz4qcuczedr7bawau4:
+  /vite-plugin-checker/0.5.1_2diyh6cvkl6stcaq3dzcmc6p6a:
     resolution: {integrity: sha512-NFiO1PyK9yGuaeSnJ7Whw9fnxLc1AlELnZoyFURnauBYhbIkx9n+PmIXxSFUuC9iFyACtbJQUAEuQi6yHs2Adg==}
     engines: {node: '>=14.16'}
     peerDependencies:
@@ -6250,11 +6249,11 @@ packages:
       npm-run-path: 4.0.1
       strip-ansi: 6.0.1
       tiny-invariant: 1.3.1
-      typescript: 4.9.3
-      vite: 3.1.8
+      typescript: 4.9.4
+      vite: 3.2.5
       vscode-languageclient: 7.0.0
       vscode-languageserver: 7.0.0
-      vscode-languageserver-textdocument: 1.0.7
+      vscode-languageserver-textdocument: 1.0.8
       vscode-uri: 3.0.6
 
   /vite-plugin-eslint/1.8.1_eslint@8.29.0:
@@ -6269,34 +6268,8 @@ packages:
       rollup: 2.79.1
     dev: true
 
-  /vite/3.1.8:
-    resolution: {integrity: sha512-m7jJe3nufUbuOfotkntGFupinL/fmuTNuQmiVE7cH2IZMuf4UbfbGYMUT3jVWgGYuRVLY9j8NnrRqgw5rr5QTg==}
-    engines: {node: ^14.18.0 || >=16.0.0}
-    hasBin: true
-    peerDependencies:
-      less: '*'
-      sass: '*'
-      stylus: '*'
-      terser: ^5.4.0
-    peerDependenciesMeta:
-      less:
-        optional: true
-      sass:
-        optional: true
-      stylus:
-        optional: true
-      terser:
-        optional: true
-    dependencies:
-      esbuild: 0.15.16
-      postcss: 8.4.19
-      resolve: 1.22.1
-      rollup: 2.78.1
-    optionalDependencies:
-      fsevents: 2.3.2
-
-  /vite/3.2.4_@types+node@18.11.10:
-    resolution: {integrity: sha512-Z2X6SRAffOUYTa+sLy3NQ7nlHFU100xwanq1WDwqaiFiCe+25zdxP1TfCS5ojPV2oDDcXudHIoPnI1Z/66B7Yw==}
+  /vite/3.2.5:
+    resolution: {integrity: sha512-4mVEpXpSOgrssFZAOmGIr85wPHKvaDAcXqxVxVRZhljkJOMZi1ibLibzjLHzJvcok8BMguLc7g1W6W/GqZbLdQ==}
     engines: {node: ^14.18.0 || >=16.0.0}
     hasBin: true
     peerDependencies:
@@ -6320,8 +6293,40 @@ packages:
       terser:
         optional: true
     dependencies:
-      '@types/node': 18.11.10
-      esbuild: 0.15.16
+      esbuild: 0.15.18
+      postcss: 8.4.19
+      resolve: 1.22.1
+      rollup: 2.79.1
+    optionalDependencies:
+      fsevents: 2.3.2
+
+  /vite/3.2.5_@types+node@18.11.12:
+    resolution: {integrity: sha512-4mVEpXpSOgrssFZAOmGIr85wPHKvaDAcXqxVxVRZhljkJOMZi1ibLibzjLHzJvcok8BMguLc7g1W6W/GqZbLdQ==}
+    engines: {node: ^14.18.0 || >=16.0.0}
+    hasBin: true
+    peerDependencies:
+      '@types/node': '>= 14'
+      less: '*'
+      sass: '*'
+      stylus: '*'
+      sugarss: '*'
+      terser: ^5.4.0
+    peerDependenciesMeta:
+      '@types/node':
+        optional: true
+      less:
+        optional: true
+      sass:
+        optional: true
+      stylus:
+        optional: true
+      sugarss:
+        optional: true
+      terser:
+        optional: true
+    dependencies:
+      '@types/node': 18.11.12
+      esbuild: 0.15.18
       postcss: 8.4.19
       resolve: 1.22.1
       rollup: 2.79.1
@@ -6353,13 +6358,13 @@ packages:
     dependencies:
       '@types/chai': 4.3.4
       '@types/chai-subset': 1.3.3
-      '@types/node': 18.11.10
+      '@types/node': 18.11.12
       chai: 4.3.7
       debug: 4.3.4
       local-pkg: 0.4.2
       tinypool: 0.2.4
       tinyspy: 1.0.2
-      vite: 3.2.4_@types+node@18.11.10
+      vite: 3.2.5_@types+node@18.11.12
     transitivePeerDependencies:
       - less
       - sass
@@ -6387,8 +6392,8 @@ packages:
       vscode-jsonrpc: 6.0.0
       vscode-languageserver-types: 3.16.0
 
-  /vscode-languageserver-textdocument/1.0.7:
-    resolution: {integrity: sha512-bFJH7UQxlXT8kKeyiyu41r22jCZXG8kuuVVA33OEJn1diWOZK5n8zBSPZFHVBOu8kXZ6h0LIRhf5UnCo61J4Hg==}
+  /vscode-languageserver-textdocument/1.0.8:
+    resolution: {integrity: sha512-1bonkGqQs5/fxGT5UchTgjGVnfysL0O8v1AYMBjqTbWQTFn721zaPGDYFkOKtfDgFiSgXM3KwaG3FMGfW4Ed9Q==}
 
   /vscode-languageserver-types/3.16.0:
     resolution: {integrity: sha512-k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==}
@@ -6402,10 +6407,10 @@ packages:
   /vscode-uri/3.0.6:
     resolution: {integrity: sha512-fmL7V1eiDBFRRnu+gfRWTzyPpNIHJTc4mWnFkwBUmO9U3KPgJAmTx7oxi2bl/Rh6HLdU7+4C9wlj0k2E4AdKFQ==}
 
-  /vue-bundle-renderer/0.4.4:
-    resolution: {integrity: sha512-kjJWPayzup8QFynETVpoYD0gDM2nbwN//bpt86hAHpZ+FPdTJFDQqKpouSLQgb2XjkOYM1uB/yc6Zb3iCvS7Gw==}
+  /vue-bundle-renderer/1.0.0:
+    resolution: {integrity: sha512-43vCqTgaMXfHhtR8/VcxxWD1DgtzyvNc4wNyG5NKCIH19O1z5G9ZCRXTGEA2wifVec5PU82CkRLD2sTK9NkTdA==}
     dependencies:
-      ufo: 0.8.6
+      ufo: 1.0.1
 
   /vue-demi/0.13.11_vue@3.2.45:
     resolution: {integrity: sha512-IR8HoEEGM65YY3ZJYAjMlKygDQn25D5ajNFNoKh9RSDMQtlzCxtfQjdQgv9jjK+m3377SsJXY8ysq8kLCZL25A==}
@@ -6420,6 +6425,7 @@ packages:
         optional: true
     dependencies:
       vue: 3.2.45
+    dev: false
 
   /vue-devtools-stub/0.1.0:
     resolution: {integrity: sha512-RutnB7X8c5hjq39NceArgXg28WZtZpGc3+J16ljMiYnFhKvd8hITxSWQSQ5bvldxMDU6gG5mkxl1MTQLXckVSQ==}
@@ -6564,9 +6570,6 @@ packages:
     resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
     engines: {node: '>=10'}
 
-  /yallist/3.1.1:
-    resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==}
-
   /yallist/4.0.0:
     resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
 
diff --git a/scripts/process-types.py b/scripts/process-types.py
index 00896b7..1641987 100644
--- a/scripts/process-types.py
+++ b/scripts/process-types.py
@@ -34,6 +34,7 @@ regex_replace: dict[re.Pattern, str] = {
         "purchaseTime",
         "warrantyExpires",
         "expiresAt",
+        "date",
     ),
 }