update refs on import

Author: Hayden
Date:   2023-02-25 17:22:00 -09:00
Parent: 28b69d3967
Commit: 8821d338c6
Signature: GPG key ID 17CF79474E257545 (no known key found for this signature in database)

14 changed files with 299 additions and 36 deletions

View file

@@ -1,8 +1,10 @@
 package v1
 
 import (
+    "context"
     "net/http"
 
+    "github.com/google/uuid"
     "github.com/hay-kot/homebox/backend/internal/core/services"
     "github.com/hay-kot/homebox/backend/internal/sys/validate"
     "github.com/hay-kot/homebox/backend/pkgs/server"
@@ -13,6 +15,20 @@ type ActionAmountResult struct {
     Completed int `json:"completed"`
 }
 
+func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int, error)) server.HandlerFunc {
+    return func(w http.ResponseWriter, r *http.Request) error {
+        ctx := services.NewContext(r.Context())
+
+        totalCompleted, err := fn(ctx, ctx.GID)
+        if err != nil {
+            log.Err(err).Str("action_ref", ref).Msg("failed to run action")
+            return validate.NewRequestError(err, http.StatusInternalServerError)
+        }
+
+        return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
+    }
+}
+
 // HandleGroupInvitationsCreate godoc
 // @Summary Ensures all items in the database have an asset id
 // @Tags Group
@@ -21,17 +37,18 @@ type ActionAmountResult struct {
 // @Router /v1/actions/ensure-asset-ids [Post]
 // @Security Bearer
 func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
-    return func(w http.ResponseWriter, r *http.Request) error {
-        ctx := services.NewContext(r.Context())
-
-        totalCompleted, err := ctrl.svc.Items.EnsureAssetID(ctx, ctx.GID)
-        if err != nil {
-            log.Err(err).Msg("failed to ensure asset id")
-            return validate.NewRequestError(err, http.StatusInternalServerError)
-        }
-
-        return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
-    }
+    return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
+}
+
+// HandleEnsureImportRefs godoc
+// @Summary Ensures all items in the database have an import ref
+// @Tags Group
+// @Produce json
+// @Success 200 {object} ActionAmountResult
+// @Router /v1/actions/ensure-import-refs [Post]
+// @Security Bearer
+func (ctrl *V1Controller) HandleEnsureImportRefs() server.HandlerFunc {
+    return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
 }
 
 // HandleItemDateZeroOut godoc
@@ -42,15 +59,5 @@ func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
 // @Router /v1/actions/zero-item-time-fields [Post]
 // @Security Bearer
 func (ctrl *V1Controller) HandleItemDateZeroOut() server.HandlerFunc {
-    return func(w http.ResponseWriter, r *http.Request) error {
-        ctx := services.NewContext(r.Context())
-
-        totalCompleted, err := ctrl.repo.Items.ZeroOutTimeFields(ctx, ctx.GID)
-        if err != nil {
-            log.Err(err).Msg("failed to ensure asset id")
-            return validate.NewRequestError(err, http.StatusInternalServerError)
-        }
-
-        return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
-    }
+    return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
 }
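Aside: the refactor above collapses three nearly identical handlers into a single actionHandlerFactory. For readers outside the Homebox codebase, the same consolidation pattern reduced to plain net/http looks roughly like the sketch below; the route, action name, and fakeAction are illustrative stand-ins, not project code.

package main

import (
    "encoding/json"
    "log"
    "net/http"
)

type actionResult struct {
    Completed int `json:"completed"`
}

// actionHandler wraps any "run an action, report how many rows it touched"
// function in one shared HTTP handler, mirroring the factory in the diff above.
func actionHandler(name string, fn func() (int, error)) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        n, err := fn()
        if err != nil {
            log.Printf("action %q failed: %v", name, err)
            http.Error(w, "internal error", http.StatusInternalServerError)
            return
        }
        w.Header().Set("Content-Type", "application/json")
        _ = json.NewEncoder(w).Encode(actionResult{Completed: n})
    }
}

func main() {
    // fakeAction stands in for something like Items.EnsureImportRef.
    fakeAction := func() (int, error) { return 42, nil }
    http.Handle("/v1/actions/ensure-import-refs", actionHandler("ensure import refs", fakeAction))
    log.Fatal(http.ListenAndServe(":8080", nil))
}

The real factory differs mainly in that it pulls the group ID out of the request context and reports failures through the project's validate and server packages.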

View file

@@ -89,6 +89,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
     a.server.Post(v1Base("/actions/ensure-asset-ids"), v1Ctrl.HandleEnsureAssetID(), userMW...)
     a.server.Post(v1Base("/actions/zero-item-time-fields"), v1Ctrl.HandleItemDateZeroOut(), userMW...)
+    a.server.Post(v1Base("/actions/ensure-import-refs"), v1Ctrl.HandleEnsureImportRefs(), userMW...)
 
     a.server.Get(v1Base("/locations"), v1Ctrl.HandleLocationGetAll(), userMW...)
     a.server.Post(v1Base("/locations"), v1Ctrl.HandleLocationCreate(), userMW...)

View file

@@ -45,6 +45,30 @@ const docTemplate = `{
                 }
             }
         },
+        "/v1/actions/ensure-import-refs": {
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Group"
+                ],
+                "summary": "Ensures all items in the database have an import ref",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/v1.ActionAmountResult"
+                        }
+                    }
+                }
+            }
+        },
         "/v1/actions/zero-item-time-fields": {
             "post": {
                 "security": [

View file

@@ -37,6 +37,30 @@
                 }
             }
         },
+        "/v1/actions/ensure-import-refs": {
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Group"
+                ],
+                "summary": "Ensures all items in the database have an import ref",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/v1.ActionAmountResult"
+                        }
+                    }
+                }
+            }
+        },
         "/v1/actions/zero-item-time-fields": {
             "post": {
                 "security": [

View file

@@ -650,6 +650,20 @@ paths:
       summary: Ensures all items in the database have an asset id
       tags:
      - Group
+  /v1/actions/ensure-import-refs:
+    post:
+      produces:
+      - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/v1.ActionAmountResult'
+      security:
+      - Bearer: []
+      summary: Ensures all items in the database have an import ref
+      tags:
+      - Group
   /v1/actions/zero-item-time-fields:
     post:
       produces:
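Taken together, the generated docs above describe one new route: an authenticated POST to /v1/actions/ensure-import-refs that returns a v1.ActionAmountResult body. A minimal client sketch follows; the base URL, port, and exact Authorization header format are assumptions about a typical deployment, not values taken from this commit.

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
)

func main() {
    // Placeholders: point these at your own instance and a valid token.
    const baseURL = "http://localhost:7745/api"
    const token = "<bearer-token>"

    req, err := http.NewRequest(http.MethodPost, baseURL+"/v1/actions/ensure-import-refs", nil)
    if err != nil {
        panic(err)
    }
    // Header scheme assumed from the "Bearer" security definition in the spec.
    req.Header.Set("Authorization", "Bearer "+token)

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // Mirrors v1.ActionAmountResult: {"completed": <n>}.
    var result struct {
        Completed int `json:"completed"`
    }
    if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
        panic(err)
    }
    fmt.Printf("updated %d items\n", result.Completed)
}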

View file

@@ -13,9 +13,9 @@ type ExportItemFields struct {
 }
 
 type ExportTSVRow struct {
+    ImportRef string         `csv:"HB.import_ref"`
     Location  LocationString `csv:"HB.location"`
     LabelStr  LabelString    `csv:"HB.labels"`
-    ImportRef string         `csv:"HB.import_ref"`
     AssetID   repo.AssetID   `csv:"HB.asset_id"`
     Archived  bool           `csv:"HB.archived"`

View file

@@ -289,6 +289,8 @@ func (s *IOSheet) TSV() ([][]string, error) {
                 v = val.Interface().(LocationString).String()
             case reflect.TypeOf(LabelString{}):
                 v = val.Interface().(LabelString).String()
+            default:
+                log.Debug().Str("type", field.Type.String()).Msg("unknown type")
             }
 
             memcsv[rowIdx][col] = v

View file

@@ -64,6 +64,27 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
     return finished, nil
 }
 
+func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) {
+    ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID)
+    if err != nil {
+        return 0, err
+    }
+
+    finished := 0
+    for _, itemID := range ids {
+        ref := uuid.New().String()[0:8]
+
+        err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref})
+        if err != nil {
+            return 0, err
+        }
+
+        finished++
+    }
+
+    return finished, nil
+}
+
 func serializeLocation[T ~[]string](location T) string {
     return strings.Join(location, "/")
 }
@@ -143,9 +164,10 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
     for i := range sheet.Rows {
         row := sheet.Rows[i]
 
+        createRequired := true
+
         // ========================================
         // Preflight check for existing item
-        // TODO: Allow updates to existing items by matching on ImportRef
         if row.ImportRef != "" {
             exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef)
             if err != nil {
@@ -153,7 +175,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
             }
 
             if exists {
-                continue
+                createRequired = false
             }
         }
 
@@ -227,18 +249,31 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
         // ========================================
         // Create Item
 
-        newItem := repo.ItemCreate{
-            ImportRef:   row.ImportRef,
-            Name:        row.Name,
-            Description: row.Description,
-            AssetID:     effAID,
-            LocationID:  locationID,
-            LabelIDs:    labelIds,
+        var item repo.ItemOut
+        switch {
+        case createRequired:
+            newItem := repo.ItemCreate{
+                ImportRef:   row.ImportRef,
+                Name:        row.Name,
+                Description: row.Description,
+                AssetID:     effAID,
+                LocationID:  locationID,
+                LabelIDs:    labelIds,
+            }
+
+            item, err = svc.repo.Items.Create(ctx, GID, newItem)
+            if err != nil {
+                return 0, err
+            }
+        default:
+            item, err = svc.repo.Items.GetByRef(ctx, GID, row.ImportRef)
+            if err != nil {
+                return 0, err
+            }
         }
 
-        item, err := svc.repo.Items.Create(ctx, GID, newItem)
-        if err != nil {
-            return 0, err
+        if item.ID == uuid.Nil {
+            panic("item ID is nil on import - this should never happen")
         }
 
         fields := make([]repo.ItemField, len(row.Fields))
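A note on the generated refs: EnsureImportRef above derives each missing ref from uuid.New().String()[0:8], i.e. the first eight characters of a UUIDv4 string, which are always hexadecimal digits. A standalone sketch of just that step, for anyone verifying the format:

package main

import (
    "fmt"

    "github.com/google/uuid"
)

func main() {
    // Same derivation the service uses: the first 8 characters of a UUID string.
    for i := 0; i < 3; i++ {
        ref := uuid.New().String()[0:8]
        fmt.Println(ref) // e.g. "9b1deb4d"
    }
}

Eight hex characters give 16^8 (about 4.3 billion) possible values, comfortably inside the 100-character MaxLen the schema keeps on the field.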

View file

@@ -67,6 +67,26 @@ func (iu *ItemUpdate) ClearDescription() *ItemUpdate {
     return iu
 }
 
+// SetImportRef sets the "import_ref" field.
+func (iu *ItemUpdate) SetImportRef(s string) *ItemUpdate {
+    iu.mutation.SetImportRef(s)
+    return iu
+}
+
+// SetNillableImportRef sets the "import_ref" field if the given value is not nil.
+func (iu *ItemUpdate) SetNillableImportRef(s *string) *ItemUpdate {
+    if s != nil {
+        iu.SetImportRef(*s)
+    }
+    return iu
+}
+
+// ClearImportRef clears the value of the "import_ref" field.
+func (iu *ItemUpdate) ClearImportRef() *ItemUpdate {
+    iu.mutation.ClearImportRef()
+    return iu
+}
+
 // SetNotes sets the "notes" field.
 func (iu *ItemUpdate) SetNotes(s string) *ItemUpdate {
     iu.mutation.SetNotes(s)
@@ -713,6 +733,11 @@ func (iu *ItemUpdate) check() error {
             return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)}
         }
     }
+    if v, ok := iu.mutation.ImportRef(); ok {
+        if err := item.ImportRefValidator(v); err != nil {
+            return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)}
+        }
+    }
     if v, ok := iu.mutation.Notes(); ok {
         if err := item.NotesValidator(v); err != nil {
             return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
@@ -773,6 +798,9 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
     if iu.mutation.DescriptionCleared() {
         _spec.ClearField(item.FieldDescription, field.TypeString)
     }
+    if value, ok := iu.mutation.ImportRef(); ok {
+        _spec.SetField(item.FieldImportRef, field.TypeString, value)
+    }
     if iu.mutation.ImportRefCleared() {
         _spec.ClearField(item.FieldImportRef, field.TypeString)
     }
@@ -1302,6 +1330,26 @@ func (iuo *ItemUpdateOne) ClearDescription() *ItemUpdateOne {
     return iuo
 }
 
+// SetImportRef sets the "import_ref" field.
+func (iuo *ItemUpdateOne) SetImportRef(s string) *ItemUpdateOne {
+    iuo.mutation.SetImportRef(s)
+    return iuo
+}
+
+// SetNillableImportRef sets the "import_ref" field if the given value is not nil.
+func (iuo *ItemUpdateOne) SetNillableImportRef(s *string) *ItemUpdateOne {
+    if s != nil {
+        iuo.SetImportRef(*s)
+    }
+    return iuo
+}
+
+// ClearImportRef clears the value of the "import_ref" field.
+func (iuo *ItemUpdateOne) ClearImportRef() *ItemUpdateOne {
+    iuo.mutation.ClearImportRef()
+    return iuo
+}
+
 // SetNotes sets the "notes" field.
 func (iuo *ItemUpdateOne) SetNotes(s string) *ItemUpdateOne {
     iuo.mutation.SetNotes(s)
@@ -1961,6 +2009,11 @@ func (iuo *ItemUpdateOne) check() error {
             return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)}
         }
     }
+    if v, ok := iuo.mutation.ImportRef(); ok {
+        if err := item.ImportRefValidator(v); err != nil {
+            return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)}
+        }
+    }
     if v, ok := iuo.mutation.Notes(); ok {
         if err := item.NotesValidator(v); err != nil {
             return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
@@ -2038,6 +2091,9 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
     if iuo.mutation.DescriptionCleared() {
         _spec.ClearField(item.FieldDescription, field.TypeString)
     }
+    if value, ok := iuo.mutation.ImportRef(); ok {
+        _spec.SetField(item.FieldImportRef, field.TypeString, value)
+    }
     if iuo.mutation.ImportRefCleared() {
         _spec.ClearField(item.FieldImportRef, field.TypeString)
     }

View file

@@ -38,8 +38,7 @@ func (Item) Fields() []ent.Field {
     return []ent.Field{
         field.String("import_ref").
             Optional().
-            MaxLen(100).
-            Immutable(),
+            MaxLen(100),
         field.String("notes").
             MaxLen(1000).
             Optional(),

View file

@@ -59,6 +59,7 @@ type (
         LocationID  uuid.UUID   `json:"locationId"`
         LabelIDs    []uuid.UUID `json:"labelIds"`
     }
+
     ItemUpdate struct {
         ParentID uuid.UUID `json:"parentId" extensions:"x-nullable,x-omitempty"`
         ID       uuid.UUID `json:"id"`
@@ -99,6 +100,12 @@ type (
         Fields []ItemField `json:"fields"`
     }
 
+    ItemPatch struct {
+        ID        uuid.UUID `json:"id"`
+        Quantity  *int      `json:"quantity,omitempty" extensions:"x-nullable,x-omitempty"`
+        ImportRef *string   `json:"importRef,omitempty" extensions:"x-nullable,x-omitempty"`
+    }
+
     ItemSummary struct {
         ImportRef string    `json:"-"`
         ID        uuid.UUID `json:"id"`
@@ -168,6 +175,7 @@ func mapItemSummary(item *ent.Item) ItemSummary {
         ID:          item.ID,
         Name:        item.Name,
         Description: item.Description,
+        ImportRef:   item.ImportRef,
         Quantity:    item.Quantity,
         CreatedAt:   item.CreatedAt,
         UpdatedAt:   item.UpdatedAt,
@@ -285,6 +293,10 @@ func (e *ItemsRepository) CheckRef(ctx context.Context, GID uuid.UUID, ref strin
     return q.Where(item.ImportRef(ref)).Exist(ctx)
 }
 
+func (e *ItemsRepository) GetByRef(ctx context.Context, GID uuid.UUID, ref string) (ItemOut, error) {
+    return e.getOne(ctx, item.ImportRef(ref), item.HasGroupWith(group.ID(GID)))
+}
+
 // GetOneByGroup returns a single item by ID. If the item does not exist, an error is returned.
 // GetOneByGroup ensures that the item belongs to a specific group.
 func (e *ItemsRepository) GetOneByGroup(ctx context.Context, gid, id uuid.UUID) (ItemOut, error) {
@@ -628,6 +640,44 @@ func (e *ItemsRepository) UpdateByGroup(ctx context.Context, GID uuid.UUID, data
     return e.GetOne(ctx, data.ID)
 }
 
+func (e *ItemsRepository) GetAllZeroImportRef(ctx context.Context, GID uuid.UUID) ([]uuid.UUID, error) {
+    var ids []uuid.UUID
+
+    err := e.db.Item.Query().
+        Where(
+            item.HasGroupWith(group.ID(GID)),
+            item.Or(
+                item.ImportRefEQ(""),
+                item.ImportRefIsNil(),
+            ),
+        ).
+        Select(item.FieldID).
+        Scan(ctx, &ids)
+    if err != nil {
+        return nil, err
+    }
+
+    return ids, nil
+}
+
+func (e *ItemsRepository) Patch(ctx context.Context, GID, ID uuid.UUID, data ItemPatch) error {
+    q := e.db.Item.Update().
+        Where(
+            item.ID(ID),
+            item.HasGroupWith(group.ID(GID)),
+        )
+
+    if data.ImportRef != nil {
+        q.SetImportRef(*data.ImportRef)
+    }
+
+    if data.Quantity != nil {
+        q.SetQuantity(*data.Quantity)
+    }
+
+    return q.Exec(ctx)
+}
+
 func (e *ItemsRepository) GetAllCustomFieldValues(ctx context.Context, GID uuid.UUID, name string) ([]string, error) {
     type st struct {
         Value string `json:"text_value"`
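The new Patch method treats nil pointer fields on ItemPatch as "leave this column untouched", which is what lets EnsureImportRef set import_ref without disturbing quantity. Below is a dependency-free sketch of that nil-means-skip convention; the record and applyPatch names are illustrative, not part of the repository layer.

package main

import "fmt"

// patch mirrors ItemPatch: a nil pointer means "do not change this field".
type patch struct {
    ImportRef *string
    Quantity  *int
}

type record struct {
    ImportRef string
    Quantity  int
}

func applyPatch(r *record, p patch) {
    if p.ImportRef != nil {
        r.ImportRef = *p.ImportRef
    }
    if p.Quantity != nil {
        r.Quantity = *p.Quantity
    }
}

func main() {
    r := record{Quantity: 3}
    ref := "9b1deb4d"
    applyPatch(&r, patch{ImportRef: &ref}) // Quantity stays 3
    fmt.Printf("%+v\n", r)
}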

View file

@@ -5,6 +5,27 @@
           Import a CSV file containing your items, labels, and locations. See documentation for more information on the
           required format.
         </p>
+        <div class="alert alert-warning shadow-lg mt-4">
+          <div>
+            <svg
+              xmlns="http://www.w3.org/2000/svg"
+              class="stroke-current flex-shrink-0 h-6 w-6 mb-auto"
+              fill="none"
+              viewBox="0 0 24 24"
+            >
+              <path
+                stroke-linecap="round"
+                stroke-linejoin="round"
+                stroke-width="2"
+                d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
+              />
+            </svg>
+            <span class="text-sm">
+              Behavior for imports with existing import_refs has changed. If an import_ref is present in the CSV file, the
+              item will be updated with the values in the CSV file.
+            </span>
+          </div>
+        </div>
         <form @submit.prevent="submitCsvFile">
           <div class="flex flex-col gap-2 py-6">
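The warning added above changes the contract of repeated imports: a row whose HB.import_ref matches an existing item now updates that item instead of being skipped. A rough two-row illustration (tab-separated in a real file; HB.name is assumed here, and only the HB.* columns visible in the ExportTSVRow diff are confirmed):

HB.import_ref    HB.name    HB.location       HB.labels
9b1deb4d         Drill      Garage/Shelf A    Tools
                 Saw        Garage/Shelf B    Tools

Re-importing this file matches the first row to the existing item by its ref and, per the new warning text, updates it with the values from the file; the second row has no ref, so it goes through the create path on every import.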

View file

@@ -13,4 +13,10 @@ export class ActionsAPI extends BaseAPI {
       url: route("/actions/zero-item-time-fields"),
     });
   }
+
+  ensureImportRefs() {
+    return this.http.post<void, ActionAmountResult>({
+      url: route("/actions/ensure-import-refs"),
+    });
+  }
 }

View file

@@ -68,6 +68,11 @@
             current asset_id field in the database and applying the next value to each item that has an unset asset_id
             field. This is done in order of the created_at field.
           </DetailAction>
+          <DetailAction @action="ensureImportRefs">
+            <template #title>Ensures Import Refs</template>
+            Ensures that all items in your inventory have a valid import_ref field. This is done by randomly generating
+            an 8 character string for each item that has an unset import_ref field.
+          </DetailAction>
           <DetailAction @click="resetItemDateTimes">
             <template #title> Zero Item Date Times</template>
             Resets the time value for all date time fields in your inventory to the beginning of the date. This is to
@@ -131,6 +136,25 @@
   notify.success(`${result.data.completed} assets have been updated.`);
 }
 
+async function ensureImportRefs() {
+  const { isCanceled } = await confirm.open(
+    "Are you sure you want to ensure all assets have an import_ref? This can take a while and cannot be undone."
+  );
+
+  if (isCanceled) {
+    return;
+  }
+
+  const result = await api.actions.ensureImportRefs();
+
+  if (result.error) {
+    notify.error("Failed to ensure import refs.");
+    return;
+  }
+
+  notify.success(`${result.data.completed} assets have been updated.`);
+}
+
 async function resetItemDateTimes() {
   const { isCanceled } = await confirm.open(
     "Are you sure you want to reset all date and time values? This can take a while and cannot be undone."