diff --git a/backend/app/api/demo.go b/backend/app/api/demo.go index 538655d..fa83e13 100644 --- a/backend/app/api/demo.go +++ b/backend/app/api/demo.go @@ -2,7 +2,6 @@ package main import ( "context" - "encoding/csv" "strings" "github.com/hay-kot/homebox/backend/internal/core/services" @@ -10,7 +9,7 @@ import ( ) func (a *app) SetupDemo() { - csvText := `Import Ref,Location,Labels,Quantity,Name,Description,Insured,Serial Number,Model Number,Manufacturer,Notes,Purchase From,Purchased Price,Purchased Time,Lifetime Warranty,Warranty Expires,Warranty Details,Sold To,Sold Price,Sold Time,Sold Notes + csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes ,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,, ,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,, ,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,, @@ -19,13 +18,11 @@ func (a *app) SetupDemo() { ,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,‎39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,, ` - var ( - 
registration = services.UserRegistration{ - Email: "demo@example.com", - Name: "Demo", - Password: "demo", - } - ) + registration := services.UserRegistration{ + Email: "demo@example.com", + Name: "Demo", + Password: "demo", + } // First check if we've already setup a demo user and skip if so _, err := a.services.User.Login(context.Background(), registration.Email, registration.Password) @@ -42,17 +39,7 @@ func (a *app) SetupDemo() { token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password) self, _ := a.services.User.GetSelf(context.Background(), token.Raw) - // Read CSV Text - reader := csv.NewReader(strings.NewReader(csvText)) - reader.Comma = ',' - - records, err := reader.ReadAll() - if err != nil { - log.Err(err).Msg("Failed to read CSV") - log.Fatal().Msg("Failed to setup demo") - } - - _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, records) + _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText)) if err != nil { log.Err(err).Msg("Failed to import CSV") log.Fatal().Msg("Failed to setup demo") diff --git a/backend/app/api/handlers/v1/v1_ctrl_actions.go b/backend/app/api/handlers/v1/v1_ctrl_actions.go index ea490c0..9f89ea6 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_actions.go +++ b/backend/app/api/handlers/v1/v1_ctrl_actions.go @@ -1,8 +1,10 @@ package v1 import ( + "context" "net/http" + "github.com/google/uuid" "github.com/hay-kot/homebox/backend/internal/core/services" "github.com/hay-kot/homebox/backend/internal/sys/validate" "github.com/hay-kot/homebox/backend/pkgs/server" @@ -13,6 +15,20 @@ type ActionAmountResult struct { Completed int `json:"completed"` } +func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int, error)) server.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) error { + ctx := services.NewContext(r.Context()) + + totalCompleted, err := fn(ctx, ctx.GID) + if err != nil { + 
log.Err(err).Str("action_ref", ref).Msg("failed to run action") + return validate.NewRequestError(err, http.StatusInternalServerError) + } + + return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted}) + } +} + // HandleGroupInvitationsCreate godoc // @Summary Ensures all items in the database have an asset id // @Tags Group @@ -21,17 +37,18 @@ type ActionAmountResult struct { // @Router /v1/actions/ensure-asset-ids [Post] // @Security Bearer func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) error { - ctx := services.NewContext(r.Context()) + return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID) +} - totalCompleted, err := ctrl.svc.Items.EnsureAssetID(ctx, ctx.GID) - if err != nil { - log.Err(err).Msg("failed to ensure asset id") - return validate.NewRequestError(err, http.StatusInternalServerError) - } - - return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted}) - } +// HandleEnsureImportRefs godoc +// @Summary Ensures all items in the database have an import ref +// @Tags Group +// @Produce json +// @Success 200 {object} ActionAmountResult +// @Router /v1/actions/ensure-import-refs [Post] +// @Security Bearer +func (ctrl *V1Controller) HandleEnsureImportRefs() server.HandlerFunc { + return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef) } // HandleItemDateZeroOut godoc @@ -42,15 +59,5 @@ func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc { // @Router /v1/actions/zero-item-time-fields [Post] // @Security Bearer func (ctrl *V1Controller) HandleItemDateZeroOut() server.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) error { - ctx := services.NewContext(r.Context()) - - totalCompleted, err := ctrl.repo.Items.ZeroOutTimeFields(ctx, ctx.GID) - if err != nil { - log.Err(err).Msg("failed to ensure asset id") - return validate.NewRequestError(err, 
http.StatusInternalServerError) - } - - return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted}) - } + return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields) } diff --git a/backend/app/api/handlers/v1/v1_ctrl_items.go b/backend/app/api/handlers/v1/v1_ctrl_items.go index b82d2f3..51c7fa3 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_items.go +++ b/backend/app/api/handlers/v1/v1_ctrl_items.go @@ -2,6 +2,7 @@ package v1 import ( "database/sql" + "encoding/csv" "errors" "net/http" "strings" @@ -255,15 +256,9 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc { return validate.NewRequestError(err, http.StatusInternalServerError) } - data, err := services.ReadCsv(file) - if err != nil { - log.Err(err).Msg("failed to read csv") - return validate.NewRequestError(err, http.StatusInternalServerError) - } - user := services.UseUserCtx(r.Context()) - _, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, data) + _, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, file) if err != nil { log.Err(err).Msg("failed to import items") return validate.NewRequestError(err, http.StatusInternalServerError) @@ -272,3 +267,26 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc { return server.Respond(w, http.StatusNoContent, nil) } } + +// HandleItemsImport godocs +// @Summary exports items into the database +// @Tags Items +// @Success 200 {string} string "text/csv" +// @Router /v1/items/export [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleItemsExport() server.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) error { + ctx := services.NewContext(r.Context()) + + csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID) + if err != nil { + log.Err(err).Msg("failed to export items") + return validate.NewRequestError(err, http.StatusInternalServerError) + } + + w.Header().Set("Content-Type", "text/tsv") + w.Header().Set("Content-Disposition", 
"attachment;filename=homebox-items.tsv") + writer := csv.NewWriter(w) + return writer.WriteAll(csvData) + } +} diff --git a/backend/app/api/handlers/v1/v1_ctrl_reporting.go b/backend/app/api/handlers/v1/v1_ctrl_reporting.go index 09f2ae6..7792c1a 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_reporting.go +++ b/backend/app/api/handlers/v1/v1_ctrl_reporting.go @@ -19,13 +19,13 @@ func (ctrl *V1Controller) HandleBillOfMaterialsExport() server.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) error { actor := services.UseUserCtx(r.Context()) - csv, err := ctrl.svc.Reporting.BillOfMaterialsTSV(r.Context(), actor.GroupID) + csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID) if err != nil { return err } - w.Header().Set("Content-Type", "text/csv") - w.Header().Set("Content-Disposition", "attachment; filename=bom.csv") + w.Header().Set("Content-Type", "text/tsv") + w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv") _, err = w.Write(csv) return err } diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go index e995fa4..53083ee 100644 --- a/backend/app/api/routes.go +++ b/backend/app/api/routes.go @@ -89,6 +89,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) { a.server.Post(v1Base("/actions/ensure-asset-ids"), v1Ctrl.HandleEnsureAssetID(), userMW...) a.server.Post(v1Base("/actions/zero-item-time-fields"), v1Ctrl.HandleItemDateZeroOut(), userMW...) + a.server.Post(v1Base("/actions/ensure-import-refs"), v1Ctrl.HandleEnsureImportRefs(), userMW...) a.server.Get(v1Base("/locations"), v1Ctrl.HandleLocationGetAll(), userMW...) a.server.Post(v1Base("/locations"), v1Ctrl.HandleLocationCreate(), userMW...) @@ -106,6 +107,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) { a.server.Get(v1Base("/items"), v1Ctrl.HandleItemsGetAll(), userMW...) a.server.Post(v1Base("/items"), v1Ctrl.HandleItemsCreate(), userMW...) a.server.Post(v1Base("/items/import"), v1Ctrl.HandleItemsImport(), userMW...) 
+ a.server.Get(v1Base("/items/export"), v1Ctrl.HandleItemsExport(), userMW...) a.server.Get(v1Base("/items/fields"), v1Ctrl.HandleGetAllCustomFieldNames(), userMW...) a.server.Get(v1Base("/items/fields/values"), v1Ctrl.HandleGetAllCustomFieldValues(), userMW...) diff --git a/backend/app/api/static/docs/docs.go b/backend/app/api/static/docs/docs.go index 8ceb1cf..77def9d 100644 --- a/backend/app/api/static/docs/docs.go +++ b/backend/app/api/static/docs/docs.go @@ -45,6 +45,30 @@ const docTemplate = `{ } } }, + "/v1/actions/ensure-import-refs": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Group" + ], + "summary": "Ensures all items in the database have an import ref", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/v1.ActionAmountResult" + } + } + } + } + }, "/v1/actions/zero-item-time-fields": { "post": { "security": [ @@ -407,6 +431,27 @@ const docTemplate = `{ } } }, + "/v1/items/export": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "tags": [ + "Items" + ], + "summary": "exports items into the database", + "responses": { + "200": { + "description": "text/csv", + "schema": { + "type": "string" + } + } + } + } + }, "/v1/items/fields": { "get": { "security": [ diff --git a/backend/app/api/static/docs/swagger.json b/backend/app/api/static/docs/swagger.json index 3808fb5..08dbb4e 100644 --- a/backend/app/api/static/docs/swagger.json +++ b/backend/app/api/static/docs/swagger.json @@ -37,6 +37,30 @@ } } }, + "/v1/actions/ensure-import-refs": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Group" + ], + "summary": "Ensures all items in the database have an import ref", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/v1.ActionAmountResult" + } + } + } + } + }, "/v1/actions/zero-item-time-fields": { "post": { "security": [ @@ -399,6 +423,27 @@ 
} } }, + "/v1/items/export": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "tags": [ + "Items" + ], + "summary": "exports items into the database", + "responses": { + "200": { + "description": "text/csv", + "schema": { + "type": "string" + } + } + } + } + }, "/v1/items/fields": { "get": { "security": [ diff --git a/backend/app/api/static/docs/swagger.yaml b/backend/app/api/static/docs/swagger.yaml index c461867..9c97791 100644 --- a/backend/app/api/static/docs/swagger.yaml +++ b/backend/app/api/static/docs/swagger.yaml @@ -650,6 +650,20 @@ paths: summary: Ensures all items in the database have an asset id tags: - Group + /v1/actions/ensure-import-refs: + post: + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/v1.ActionAmountResult' + security: + - Bearer: [] + summary: Ensures all items in the database have an import ref + tags: + - Group /v1/actions/zero-item-time-fields: post: produces: @@ -1109,6 +1123,18 @@ paths: summary: Update Maintenance Entry tags: - Maintenance + /v1/items/export: + get: + responses: + "200": + description: text/csv + schema: + type: string + security: + - Bearer: [] + summary: exports items into the database + tags: + - Items /v1/items/fields: get: produces: diff --git a/backend/go.sum b/backend/go.sum index eb146f2..5e10593 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -76,13 +76,11 @@ github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27k github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y= github.com/mattn/go-sqlite3 v1.14.16/go.mod 
h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -92,8 +90,6 @@ github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.29.0 h1:Zes4hju04hjbvkVkOhdl2HpZa+0PmVwigmo8XoORE5w= github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= -github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= diff --git a/backend/internal/core/services/all.go b/backend/internal/core/services/all.go index 2997095..dab59ef 100644 --- a/backend/internal/core/services/all.go +++ b/backend/internal/core/services/all.go @@ -1,16 +1,13 @@ package services import ( - "github.com/hay-kot/homebox/backend/internal/core/services/reporting" "github.com/hay-kot/homebox/backend/internal/data/repo" - "github.com/rs/zerolog/log" ) type AllServices struct { - User *UserService - Group *GroupService - Items *ItemService - Reporting *reporting.ReportingService + User 
*UserService + Group *GroupService + Items *ItemService } type OptionsFunc func(*options) @@ -45,7 +42,5 @@ func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices { repo: repos, autoIncrementAssetID: options.autoIncrementAssetID, }, - // TODO: don't use global logger - Reporting: reporting.NewReportingService(repos, &log.Logger), } } diff --git a/backend/internal/core/services/.testdata/import.csv b/backend/internal/core/services/reporting/.testdata/import.csv similarity index 100% rename from backend/internal/core/services/.testdata/import.csv rename to backend/internal/core/services/reporting/.testdata/import.csv diff --git a/backend/internal/core/services/.testdata/import.tsv b/backend/internal/core/services/reporting/.testdata/import.tsv similarity index 100% rename from backend/internal/core/services/.testdata/import.tsv rename to backend/internal/core/services/reporting/.testdata/import.tsv diff --git a/backend/internal/core/services/reporting/.testdata/import/fields.csv b/backend/internal/core/services/reporting/.testdata/import/fields.csv new file mode 100644 index 0000000..28c3c17 --- /dev/null +++ b/backend/internal/core/services/reporting/.testdata/import/fields.csv @@ -0,0 +1,5 @@ +HB.location,HB.name,HB.quantity,HB.description,HB.field.Custom Field 1,HB.field.Custom Field 2,HB.field.Custom Field 3 +loc,Item 1,1,Description 1,Value 1[1],Value 1[2],Value 1[3] +loc,Item 2,2,Description 2,Value 2[1],Value 2[2],Value 2[3] +loc,Item 3,3,Description 3,Value 3[1],Value 3[2],Value 3[3] + diff --git a/backend/internal/core/services/reporting/.testdata/import/minimal.csv b/backend/internal/core/services/reporting/.testdata/import/minimal.csv new file mode 100644 index 0000000..be39ad2 --- /dev/null +++ b/backend/internal/core/services/reporting/.testdata/import/minimal.csv @@ -0,0 +1,4 @@ +HB.location,HB.name,HB.quantity,HB.description +loc,Item 1,1,Description 1 +loc,Item 2,2,Description 2 +loc,Item 3,3,Description 3 \ No newline at end of file diff 
--git a/backend/internal/core/services/reporting/.testdata/import/types.csv b/backend/internal/core/services/reporting/.testdata/import/types.csv new file mode 100644 index 0000000..96ff236 --- /dev/null +++ b/backend/internal/core/services/reporting/.testdata/import/types.csv @@ -0,0 +1,4 @@ +HB.name,HB.asset_id,HB.location,HB.labels +Item 1,1,Path / To / Location 1,L1 ; L2 ; L3 +Item 2,000-002,Path /To/ Location 2,L1;L2;L3 +Item 3,1000-003,Path / To /Location 3 , L1;L2; L3 \ No newline at end of file diff --git a/backend/internal/core/services/reporting/bill_of_materials.go b/backend/internal/core/services/reporting/bill_of_materials.go new file mode 100644 index 0000000..4147d4b --- /dev/null +++ b/backend/internal/core/services/reporting/bill_of_materials.go @@ -0,0 +1,42 @@ +package reporting + +import ( + "github.com/gocarina/gocsv" + "github.com/hay-kot/homebox/backend/internal/data/repo" + "github.com/hay-kot/homebox/backend/internal/data/types" +) + +// ================================================================================================= + +type BillOfMaterialsEntry struct { + PurchaseDate types.Date `csv:"Purchase Date"` + Name string `csv:"Name"` + Description string `csv:"Description"` + Manufacturer string `csv:"Manufacturer"` + SerialNumber string `csv:"Serial Number"` + ModelNumber string `csv:"Model Number"` + Quantity int `csv:"Quantity"` + Price float64 `csv:"Price"` + TotalPrice float64 `csv:"Total Price"` +} + +// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format +// See BillOfMaterialsEntry for the format of the output +func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) { + bomEntries := make([]BillOfMaterialsEntry, len(entities)) + for i, entity := range entities { + bomEntries[i] = BillOfMaterialsEntry{ + PurchaseDate: entity.PurchaseTime, + Name: entity.Name, + Description: entity.Description, + Manufacturer: entity.Manufacturer, + SerialNumber: entity.SerialNumber, + 
ModelNumber: entity.ModelNumber, + Quantity: entity.Quantity, + Price: entity.PurchasePrice, + TotalPrice: entity.PurchasePrice * float64(entity.Quantity), + } + } + + return gocsv.MarshalBytes(&bomEntries) +} diff --git a/backend/internal/core/services/reporting/import.go b/backend/internal/core/services/reporting/import.go new file mode 100644 index 0000000..b608e62 --- /dev/null +++ b/backend/internal/core/services/reporting/import.go @@ -0,0 +1,93 @@ +package reporting + +import ( + "bytes" + "encoding/csv" + "errors" + "io" + "strings" +) + +var ( + ErrNoHomeboxHeaders = errors.New("no headers found") + ErrMissingRequiredHeaders = errors.New("missing required headers `HB.location` or `HB.name`") +) + +// determineSeparator determines the separator used in the CSV file +// It returns the separator as a rune and an error if it could not be determined +// +// It is assumed that the first row is the header row and that the separator is the same +// for all rows. +// +// Supported separators are `,` and `\t` +func determineSeparator(data []byte) (rune, error) { + // First row + firstRow := bytes.Split(data, []byte("\n"))[0] + + // find first comma or /t + comma := bytes.IndexByte(firstRow, ',') + tab := bytes.IndexByte(firstRow, '\t') + + switch { + case comma == -1 && tab == -1: + return 0, errors.New("could not determine separator") + case tab > comma: + return '\t', nil + default: + return ',', nil + } +} + +// readRawCsv reads a CSV file and returns the raw data as a 2D string array +// It determines the separator used in the CSV file and returns an error if +// it could not be determined +func readRawCsv(r io.Reader) ([][]string, error) { + data, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + reader := csv.NewReader(bytes.NewReader(data)) + + // Determine separator + sep, err := determineSeparator(data) + if err != nil { + return nil, err + } + + reader.Comma = sep + + return reader.ReadAll() +} + +// parseHeaders parses the homebox headers 
from the CSV file and returns a map of the headers +// and their column index as well as a list of the field headers (HB.field.*) in the order +// they appear in the CSV file +// +// It returns an error if no homebox headers are found +func parseHeaders(headers []string) (hbHeaders map[string]int, fieldHeaders []string, err error) { + hbHeaders = map[string]int{} // initialize map + + for col, h := range headers { + if strings.HasPrefix(h, "HB.field.") { + fieldHeaders = append(fieldHeaders, h) + } + + if strings.HasPrefix(h, "HB.") { + hbHeaders[h] = col + } + } + + required := []string{"HB.location", "HB.name"} + for _, h := range required { + if _, ok := hbHeaders[h]; !ok { + return nil, nil, ErrMissingRequiredHeaders + } + } + + if len(hbHeaders) == 0 { + return nil, nil, ErrNoHomeboxHeaders + } + + return hbHeaders, fieldHeaders, nil +} diff --git a/backend/internal/core/services/reporting/io_row.go b/backend/internal/core/services/reporting/io_row.go new file mode 100644 index 0000000..faa5d25 --- /dev/null +++ b/backend/internal/core/services/reporting/io_row.go @@ -0,0 +1,85 @@ +package reporting + +import ( + "strings" + + "github.com/hay-kot/homebox/backend/internal/data/repo" + "github.com/hay-kot/homebox/backend/internal/data/types" +) + +type ExportItemFields struct { + Name string + Value string +} + +type ExportTSVRow struct { + ImportRef string `csv:"HB.import_ref"` + Location LocationString `csv:"HB.location"` + LabelStr LabelString `csv:"HB.labels"` + AssetID repo.AssetID `csv:"HB.asset_id"` + Archived bool `csv:"HB.archived"` + + Name string `csv:"HB.name"` + Quantity int `csv:"HB.quantity"` + Description string `csv:"HB.description"` + Insured bool `csv:"HB.insured"` + Notes string `csv:"HB.notes"` + + PurchasePrice float64 `csv:"HB.purchase_price"` + PurchaseFrom string `csv:"HB.purchase_from"` + PurchaseTime types.Date `csv:"HB.purchase_time"` + + Manufacturer string `csv:"HB.manufacturer"` + ModelNumber string `csv:"HB.model_number"` + 
SerialNumber string `csv:"HB.serial_number"` + + LifetimeWarranty bool `csv:"HB.lifetime_warranty"` + WarrantyExpires types.Date `csv:"HB.warranty_expires"` + WarrantyDetails string `csv:"HB.warranty_details"` + + SoldTo string `csv:"HB.sold_to"` + SoldPrice float64 `csv:"HB.sold_price"` + SoldTime types.Date `csv:"HB.sold_time"` + SoldNotes string `csv:"HB.sold_notes"` + + Fields []ExportItemFields `csv:"-"` +} + +// ============================================================================ + +// LabelString is a string slice that is used to represent a list of labels. +// +// For example, a list of labels "Important; Work" would be represented as a +// LabelString with the following values: +// +// LabelString{"Important", "Work"} +type LabelString []string + +func parseLabelString(s string) LabelString { + v, _ := parseSeparatedString(s, ";") + return v +} + +func (ls LabelString) String() string { + return strings.Join(ls, "; ") +} + +// ============================================================================ + +// LocationString is a string slice that is used to represent a location +// hierarchy. 
+// +// For example, a location hierarchy of "Home / Bedroom / Desk" would be +// represented as a LocationString with the following values: +// +// LocationString{"Home", "Bedroom", "Desk"} +type LocationString []string + +func parseLocationString(s string) LocationString { + v, _ := parseSeparatedString(s, "/") + return v +} + +func (csf LocationString) String() string { + return strings.Join(csf, " / ") +} diff --git a/backend/internal/core/services/reporting/io_sheet.go b/backend/internal/core/services/reporting/io_sheet.go new file mode 100644 index 0000000..88d1d36 --- /dev/null +++ b/backend/internal/core/services/reporting/io_sheet.go @@ -0,0 +1,310 @@ +package reporting + +import ( + "fmt" + "io" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/hay-kot/homebox/backend/internal/data/repo" + "github.com/hay-kot/homebox/backend/internal/data/types" + "github.com/rs/zerolog/log" +) + +// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting +// items from homebox. It is used to read/write the data from/to a CSV/TSV file given +// the standard format of the file. +// +// See ExportTSVRow for the format of the data in the sheet. +type IOSheet struct { + headers []string + custom []int + index map[string]int + Rows []ExportTSVRow +} + +func (s *IOSheet) indexHeaders() { + s.index = make(map[string]int) + + for i, h := range s.headers { + if strings.HasPrefix(h, "HB.field") { + s.custom = append(s.custom, i) + } + + if strings.HasPrefix(h, "HB.") { + s.index[h] = i + } + } +} + +func (s *IOSheet) GetColumn(str string) (col int, ok bool) { + if s.index == nil { + s.indexHeaders() + } + + col, ok = s.index[str] + return +} + +// Read reads a CSV/TSV and populates the "Rows" field with the data from the sheet +// Custom Fields are supported via the `HB.field.*` headers. The `HB.field.*` the "Name" +// of the field is the part after the `HB.field.` prefix. 
Additionally, Custom Fields with +// no value are excluded from the row.Fields slice, this includes empty strings. +// +// Note That +// - the first row is assumed to be the header +// - at least 1 row of data is required +// - rows and columns must be rectangular (i.e. all rows must have the same number of columns) +func (s *IOSheet) Read(data io.Reader) error { + sheet, err := readRawCsv(data) + if err != nil { + return err + } + + if len(sheet) < 2 { + return fmt.Errorf("sheet must have at least 1 row of data (header + 1)") + } + + s.headers = sheet[0] + s.Rows = make([]ExportTSVRow, len(sheet)-1) + + for i, row := range sheet[1:] { + if len(row) != len(s.headers) { + return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers)) + } + + rowData := ExportTSVRow{} + + st := reflect.TypeOf(ExportTSVRow{}) + + for i := 0; i < st.NumField(); i++ { + field := st.Field(i) + tag := field.Tag.Get("csv") + if tag == "" || tag == "-" { + continue + } + + col, ok := s.GetColumn(tag) + if !ok { + continue + } + + val := row[col] + + var v interface{} + + switch field.Type { + case reflect.TypeOf(""): + v = val + case reflect.TypeOf(int(0)): + v = parseInt(val) + case reflect.TypeOf(bool(false)): + v = parseBool(val) + case reflect.TypeOf(float64(0)): + v = parseFloat(val) + + // Custom Types + case reflect.TypeOf(types.Date{}): + v = types.DateFromString(val) + case reflect.TypeOf(repo.AssetID(0)): + v, _ = repo.ParseAssetID(val) + case reflect.TypeOf(LocationString{}): + v = parseLocationString(val) + case reflect.TypeOf(LabelString{}): + v = parseLabelString(val) + } + + log.Debug(). + Str("tag", tag). + Interface("val", v). + Str("type", fmt.Sprintf("%T", v)). + Msg("parsed value") + + // Nil values are not allowed at the moment. This may change. 
+ if v == nil { + return fmt.Errorf("could not convert %q to %s", val, field.Type) + } + + ptrField := reflect.ValueOf(&rowData).Elem().Field(i) + ptrField.Set(reflect.ValueOf(v)) + } + + for _, col := range s.custom { + colName := strings.TrimPrefix(s.headers[col], "HB.field.") + customVal := row[col] + if customVal == "" { + continue + } + + rowData.Fields = append(rowData.Fields, ExportItemFields{ + Name: colName, + Value: customVal, + }) + } + + s.Rows[i] = rowData + } + + return nil +} + +// Write writes the sheet to a writer. +func (s *IOSheet) ReadItems(items []repo.ItemOut) { + s.Rows = make([]ExportTSVRow, len(items)) + + extraHeaders := map[string]struct{}{} + + for i := range items { + item := items[i] + + // TODO: Support fetching nested locations + locString := LocationString{item.Location.Name} + + labelString := make([]string, len(item.Labels)) + + for i, l := range item.Labels { + labelString[i] = l.Name + } + + customFields := make([]ExportItemFields, len(item.Fields)) + + for i, f := range item.Fields { + extraHeaders[f.Name] = struct{}{} + + customFields[i] = ExportItemFields{ + Name: f.Name, + Value: f.TextValue, + } + } + + s.Rows[i] = ExportTSVRow{ + // fill struct + Location: locString, + LabelStr: labelString, + + ImportRef: item.ImportRef, + AssetID: item.AssetID, + Name: item.Name, + Quantity: item.Quantity, + Description: item.Description, + Insured: item.Insured, + Archived: item.Archived, + + PurchasePrice: item.PurchasePrice, + PurchaseFrom: item.PurchaseFrom, + PurchaseTime: item.PurchaseTime, + + Manufacturer: item.Manufacturer, + ModelNumber: item.ModelNumber, + SerialNumber: item.SerialNumber, + + LifetimeWarranty: item.LifetimeWarranty, + WarrantyExpires: item.WarrantyExpires, + WarrantyDetails: item.WarrantyDetails, + + SoldTo: item.SoldTo, + SoldTime: item.SoldTime, + SoldPrice: item.SoldPrice, + SoldNotes: item.SoldNotes, + + Fields: customFields, + } + } + + // Extract and sort additional headers for deterministic output + 
customHeaders := make([]string, 0, len(extraHeaders)) + + for k := range extraHeaders { + customHeaders = append(customHeaders, k) + } + + sort.Strings(customHeaders) + + st := reflect.TypeOf(ExportTSVRow{}) + + // Write headers + for i := 0; i < st.NumField(); i++ { + field := st.Field(i) + tag := field.Tag.Get("csv") + if tag == "" || tag == "-" { + continue + } + + s.headers = append(s.headers, tag) + } + + for _, h := range customHeaders { + s.headers = append(s.headers, "HB.field."+h) + } +} + +// Writes the current sheet to a writer in TSV format. +func (s *IOSheet) TSV() ([][]string, error) { + memcsv := make([][]string, len(s.Rows)+1) + + memcsv[0] = s.headers + + // use struct tags in rows to dertmine column order + for i, row := range s.Rows { + rowIdx := i + 1 + + memcsv[rowIdx] = make([]string, len(s.headers)) + + st := reflect.TypeOf(row) + + for i := 0; i < st.NumField(); i++ { + field := st.Field(i) + tag := field.Tag.Get("csv") + if tag == "" || tag == "-" { + continue + } + + col, ok := s.GetColumn(tag) + if !ok { + continue + } + + val := reflect.ValueOf(row).Field(i) + + var v string + + switch field.Type { + case reflect.TypeOf(""): + v = val.String() + case reflect.TypeOf(int(0)): + v = strconv.Itoa(int(val.Int())) + case reflect.TypeOf(bool(false)): + v = strconv.FormatBool(val.Bool()) + case reflect.TypeOf(float64(0)): + v = strconv.FormatFloat(val.Float(), 'f', -1, 64) + + // Custom Types + case reflect.TypeOf(types.Date{}): + v = val.Interface().(types.Date).String() + case reflect.TypeOf(repo.AssetID(0)): + v = val.Interface().(repo.AssetID).String() + case reflect.TypeOf(LocationString{}): + v = val.Interface().(LocationString).String() + case reflect.TypeOf(LabelString{}): + v = val.Interface().(LabelString).String() + default: + log.Debug().Str("type", field.Type.String()).Msg("unknown type") + } + + memcsv[rowIdx][col] = v + } + + for _, f := range row.Fields { + col, ok := s.GetColumn("HB.field." 
+ f.Name) + if !ok { + continue + } + + memcsv[i+1][col] = f.Value + } + } + + return memcsv, nil +} diff --git a/backend/internal/core/services/reporting/io_sheet_test.go b/backend/internal/core/services/reporting/io_sheet_test.go new file mode 100644 index 0000000..9d7f9a0 --- /dev/null +++ b/backend/internal/core/services/reporting/io_sheet_test.go @@ -0,0 +1,226 @@ +package reporting + +import ( + "bytes" + "reflect" + "testing" + + _ "embed" + + "github.com/hay-kot/homebox/backend/internal/data/repo" + "github.com/stretchr/testify/assert" +) + +var ( + //go:embed .testdata/import/minimal.csv + minimalImportCSV []byte + + //go:embed .testdata/import/fields.csv + customFieldImportCSV []byte + + //go:embed .testdata/import/types.csv + customTypesImportCSV []byte + + //go:embed .testdata/import.csv + CSVData_Comma []byte + + //go:embed .testdata/import.tsv + CSVData_Tab []byte +) + +func TestSheet_Read(t *testing.T) { + tests := []struct { + name string + data []byte + want []ExportTSVRow + wantErr bool + }{ + { + name: "minimal import", + data: minimalImportCSV, + want: []ExportTSVRow{ + {Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"}, + {Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"}, + {Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"}, + }, + }, + { + name: "custom field import", + data: customFieldImportCSV, + want: []ExportTSVRow{ + { + Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1", + Fields: []ExportItemFields{ + {Name: "Custom Field 1", Value: "Value 1[1]"}, + {Name: "Custom Field 2", Value: "Value 1[2]"}, + {Name: "Custom Field 3", Value: "Value 1[3]"}, + }, + }, + { + Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2", + Fields: []ExportItemFields{ + {Name: "Custom Field 1", Value: "Value 2[1]"}, + {Name: "Custom Field 2", Value: "Value 
2[2]"}, + {Name: "Custom Field 3", Value: "Value 2[3]"}, + }, + }, + { + Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3", + Fields: []ExportItemFields{ + {Name: "Custom Field 1", Value: "Value 3[1]"}, + {Name: "Custom Field 2", Value: "Value 3[2]"}, + {Name: "Custom Field 3", Value: "Value 3[3]"}, + }, + }, + }, + }, + { + name: "custom types import", + data: customTypesImportCSV, + want: []ExportTSVRow{ + { + Name: "Item 1", + AssetID: repo.AssetID(1), + Location: LocationString{"Path", "To", "Location 1"}, + LabelStr: LabelString{"L1", "L2", "L3"}, + }, + { + Name: "Item 2", + AssetID: repo.AssetID(2), + Location: LocationString{"Path", "To", "Location 2"}, + LabelStr: LabelString{"L1", "L2", "L3"}, + }, + { + Name: "Item 3", + AssetID: repo.AssetID(1000003), + Location: LocationString{"Path", "To", "Location 3"}, + LabelStr: LabelString{"L1", "L2", "L3"}, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + reader := bytes.NewReader(tt.data) + + sheet := &IOSheet{} + err := sheet.Read(reader) + + switch { + case tt.wantErr: + assert.Error(t, err) + default: + assert.NoError(t, err) + assert.ElementsMatch(t, tt.want, sheet.Rows) + } + }) + } +} + +func Test_parseHeaders(t *testing.T) { + tests := []struct { + name string + rawHeaders []string + wantHbHeaders map[string]int + wantFieldHeaders []string + wantErr bool + }{ + { + name: "no hombox headers", + rawHeaders: []string{"Header 1", "Header 2", "Header 3"}, + wantHbHeaders: nil, + wantFieldHeaders: nil, + wantErr: true, + }, + { + name: "field headers only", + rawHeaders: []string{"HB.location", "HB.name", "HB.field.1", "HB.field.2", "HB.field.3"}, + wantHbHeaders: map[string]int{ + "HB.location": 0, + "HB.name": 1, + "HB.field.1": 2, + "HB.field.2": 3, + "HB.field.3": 4, + }, + wantFieldHeaders: []string{"HB.field.1", "HB.field.2", "HB.field.3"}, + wantErr: false, + }, + { + name: "mixed headers", + rawHeaders: []string{"Header 
1", "HB.name", "Header 2", "HB.field.2", "Header 3", "HB.field.3", "HB.location"}, + wantHbHeaders: map[string]int{ + "HB.name": 1, + "HB.field.2": 3, + "HB.field.3": 5, + "HB.location": 6, + }, + wantFieldHeaders: []string{"HB.field.2", "HB.field.3"}, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotHbHeaders, gotFieldHeaders, err := parseHeaders(tt.rawHeaders) + if (err != nil) != tt.wantErr { + t.Errorf("parseHeaders() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(gotHbHeaders, tt.wantHbHeaders) { + t.Errorf("parseHeaders() gotHbHeaders = %v, want %v", gotHbHeaders, tt.wantHbHeaders) + } + if !reflect.DeepEqual(gotFieldHeaders, tt.wantFieldHeaders) { + t.Errorf("parseHeaders() gotFieldHeaders = %v, want %v", gotFieldHeaders, tt.wantFieldHeaders) + } + }) + } +} + +func Test_determineSeparator(t *testing.T) { + type args struct { + data []byte + } + tests := []struct { + name string + args args + want rune + wantErr bool + }{ + { + name: "comma", + args: args{ + data: CSVData_Comma, + }, + want: ',', + wantErr: false, + }, + { + name: "tab", + args: args{ + data: CSVData_Tab, + }, + want: '\t', + wantErr: false, + }, + { + name: "invalid", + args: args{ + data: []byte("a;b;c"), + }, + want: 0, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := determineSeparator(tt.args.data) + if (err != nil) != tt.wantErr { + t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr) + return + } + if got != tt.want { + t.Errorf("determineSeparator() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/backend/internal/core/services/reporting/reporting.go b/backend/internal/core/services/reporting/reporting.go deleted file mode 100644 index 4ba408b..0000000 --- a/backend/internal/core/services/reporting/reporting.go +++ /dev/null @@ -1,85 +0,0 @@ -package reporting - -import ( - "context" - "encoding/csv" - "io" - "time" 
- - "github.com/gocarina/gocsv" - "github.com/google/uuid" - "github.com/hay-kot/homebox/backend/internal/data/repo" - "github.com/rs/zerolog" -) - -type ReportingService struct { - repos *repo.AllRepos - l *zerolog.Logger -} - -func NewReportingService(repos *repo.AllRepos, l *zerolog.Logger) *ReportingService { - gocsv.SetCSVWriter(func(out io.Writer) *gocsv.SafeCSVWriter { - writer := csv.NewWriter(out) - writer.Comma = '\t' - return gocsv.NewSafeCSVWriter(writer) - }) - - return &ReportingService{ - repos: repos, - l: l, - } -} - -// ================================================================================================= - -// NullableTime is a custom type that implements the MarshalCSV interface -// to allow for nullable time.Time fields in the CSV output to be empty -// and not "0001-01-01". It also overrides the default CSV output format -type NullableTime time.Time - -func (t NullableTime) MarshalCSV() (string, error) { - if time.Time(t).IsZero() { - return "", nil - } - // YYYY-MM-DD - return time.Time(t).Format("2006-01-02"), nil -} - -type BillOfMaterialsEntry struct { - PurchaseDate NullableTime `csv:"Purchase Date"` - Name string `csv:"Name"` - Description string `csv:"Description"` - Manufacturer string `csv:"Manufacturer"` - SerialNumber string `csv:"Serial Number"` - ModelNumber string `csv:"Model Number"` - Quantity int `csv:"Quantity"` - Price float64 `csv:"Price"` - TotalPrice float64 `csv:"Total Price"` -} - -// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format -// See BillOfMaterialsEntry for the format of the output -func (rs *ReportingService) BillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) { - entities, err := rs.repos.Items.GetAll(ctx, GID) - if err != nil { - rs.l.Debug().Err(err).Msg("failed to get all items for BOM Csv Reporting") - return nil, err - } - - bomEntries := make([]BillOfMaterialsEntry, len(entities)) - for i, entity := range entities { - 
bomEntries[i] = BillOfMaterialsEntry{ - PurchaseDate: NullableTime(entity.PurchaseTime), - Name: entity.Name, - Description: entity.Description, - Manufacturer: entity.Manufacturer, - SerialNumber: entity.SerialNumber, - ModelNumber: entity.ModelNumber, - Quantity: entity.Quantity, - Price: entity.PurchasePrice, - TotalPrice: entity.PurchasePrice * float64(entity.Quantity), - } - } - - return gocsv.MarshalBytes(&bomEntries) -} diff --git a/backend/internal/core/services/reporting/value_parsers.go b/backend/internal/core/services/reporting/value_parsers.go new file mode 100644 index 0000000..7410396 --- /dev/null +++ b/backend/internal/core/services/reporting/value_parsers.go @@ -0,0 +1,38 @@ +package reporting + +import ( + "strconv" + "strings" +) + +func parseSeparatedString(s string, sep string) ([]string, error) { + list := strings.Split(s, sep) + + csf := make([]string, 0, len(list)) + for _, s := range list { + trimmed := strings.TrimSpace(s) + if trimmed != "" { + csf = append(csf, trimmed) + } + } + + return csf, nil +} + +func parseFloat(s string) float64 { + if s == "" { + return 0 + } + f, _ := strconv.ParseFloat(s, 64) + return f +} + +func parseBool(s string) bool { + b, _ := strconv.ParseBool(s) + return b +} + +func parseInt(s string) int { + i, _ := strconv.Atoi(s) + return i +} diff --git a/backend/internal/core/services/reporting/value_parsers_test.go b/backend/internal/core/services/reporting/value_parsers_test.go new file mode 100644 index 0000000..bcd7431 --- /dev/null +++ b/backend/internal/core/services/reporting/value_parsers_test.go @@ -0,0 +1,65 @@ +package reporting + +import ( + "reflect" + "testing" +) + +func Test_parseSeparatedString(t *testing.T) { + type args struct { + s string + sep string + } + tests := []struct { + name string + args args + want []string + wantErr bool + }{ + { + name: "comma", + args: args{ + s: "a,b,c", + sep: ",", + }, + want: []string{"a", "b", "c"}, + wantErr: false, + }, + { + name: "trimmed comma", + 
args: args{ + s: "a, b, c", + sep: ",", + }, + want: []string{"a", "b", "c"}, + }, + { + name: "excessive whitespace", + args: args{ + s: " a, b, c ", + sep: ",", + }, + want: []string{"a", "b", "c"}, + }, + { + name: "empty", + args: args{ + s: "", + sep: ",", + }, + want: []string{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseSeparatedString(tt.args.s, tt.args.sep) + if (err != nil) != tt.wantErr { + t.Errorf("parseSeparatedString() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("parseSeparatedString() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/backend/internal/core/services/service_items.go b/backend/internal/core/services/service_items.go index df37e67..c1a37d5 100644 --- a/backend/internal/core/services/service_items.go +++ b/backend/internal/core/services/service_items.go @@ -3,10 +3,13 @@ package services import ( "context" "errors" + "fmt" + "io" + "strings" "github.com/google/uuid" + "github.com/hay-kot/homebox/backend/internal/core/services/reporting" "github.com/hay-kot/homebox/backend/internal/data/repo" - "github.com/rs/zerolog/log" ) var ( @@ -37,7 +40,6 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut, func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) { items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID) - if err != nil { return 0, err } @@ -61,190 +63,290 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, return finished, nil } -func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data [][]string) (int, error) { - loaded := []csvRow{} - // Skip first row - for _, row := range data[1:] { - // Skip empty rows - if len(row) == 0 { - continue - } - - if len(row) != NumOfCols { - return 0, ErrInvalidCsv - } - - r := newCsvRow(row) - loaded = append(loaded, r) - } - - // validate rows - var errMap = 
map[int][]error{} - var hasErr bool - for i, r := range loaded { - - errs := r.validate() - - if len(errs) > 0 { - hasErr = true - lineNum := i + 2 - - errMap[lineNum] = errs - } - } - - if hasErr { - for lineNum, errs := range errMap { - for _, err := range errs { - log.Error().Err(err).Int("line", lineNum).Msg("csv import error") - } - } - } - - // Bootstrap the locations and labels so we can reuse the created IDs for the items - locations := map[string]uuid.UUID{} - existingLocation, err := svc.repo.Locations.GetAll(ctx, GID, repo.LocationQuery{}) +func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) { + ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID) if err != nil { return 0, err } - for _, loc := range existingLocation { - locations[loc.Name] = loc.ID + + finished := 0 + for _, itemID := range ids { + ref := uuid.New().String()[0:8] + + err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref}) + if err != nil { + return 0, err + } + + finished++ } - labels := map[string]uuid.UUID{} - existingLabels, err := svc.repo.Labels.GetAll(ctx, GID) + return finished, nil +} + +func serializeLocation[T ~[]string](location T) string { + return strings.Join(location, "/") +} + +// CsvImport imports items from a CSV file. using the standard defined format. +// +// CsvImport applies the following rules/operations +// +// 1. If the item does not exist, it is created. +// 2. If the item has a ImportRef and it exists it is skipped +// 3. Locations and Labels are created if they do not exist. 
+func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) { + sheet := reporting.IOSheet{} + + err := sheet.Read(data) if err != nil { return 0, err } - for _, label := range existingLabels { - labels[label.Name] = label.ID - } - for _, row := range loaded { + // ======================================== + // Labels - // Locations - if _, exists := locations[row.Location]; !exists { - result, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{ - Name: row.Location, - Description: "", - }) - if err != nil { - return 0, err - } - locations[row.Location] = result.ID + labelMap := make(map[string]uuid.UUID) + { + labels, err := svc.repo.Labels.GetAll(ctx, GID) + if err != nil { + return 0, err } - // Labels - - for _, label := range row.getLabels() { - if _, exists := labels[label]; exists { - continue - } - result, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{ - Name: label, - Description: "", - }) - if err != nil { - return 0, err - } - labels[label] = result.ID + for _, label := range labels { + labelMap[label.Name] = label.ID } } - highest := repo.AssetID(-1) + // ======================================== + // Locations + + locationMap := make(map[string]uuid.UUID) + { + locations, err := svc.repo.Locations.Tree(ctx, GID, repo.TreeQuery{WithItems: false}) + if err != nil { + return 0, err + } + + // Traverse the tree and build a map of location full paths to IDs + // where the full path is the location name joined by slashes. 
+ var traverse func(location *repo.TreeItem, path []string) + traverse = func(location *repo.TreeItem, path []string) { + path = append(path, location.Name) + + locationMap[serializeLocation(path)] = location.ID + + for _, child := range location.Children { + traverse(child, path) + } + } + + for _, location := range locations { + traverse(&location, []string{}) + } + } + + // ======================================== + // Import items + + // Asset ID Pre-Check + highestAID := repo.AssetID(-1) if svc.autoIncrementAssetID { - highest, err = svc.repo.Items.GetHighestAssetID(ctx, GID) + highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, GID) if err != nil { return 0, err } } - // Create the items - var count int - for _, row := range loaded { - // Check Import Ref - if row.Item.ImportRef != "" { - exists, err := svc.repo.Items.CheckRef(ctx, GID, row.Item.ImportRef) - if exists { - continue - } + finished := 0 + + for i := range sheet.Rows { + row := sheet.Rows[i] + + createRequired := true + + // ======================================== + // Preflight check for existing item + if row.ImportRef != "" { + exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef) if err != nil { - log.Err(err).Msg("error checking import ref") + return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err) + } + + if exists { + createRequired = false } } - locationID := locations[row.Location] - labelIDs := []uuid.UUID{} - for _, label := range row.getLabels() { - labelIDs = append(labelIDs, labels[label]) + // ======================================== + // Pre-Create Labels as necessary + labelIds := make([]uuid.UUID, len(row.LabelStr)) + + for j := range row.LabelStr { + label := row.LabelStr[j] + + id, ok := labelMap[label] + if !ok { + newLabel, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{Name: label}) + if err != nil { + return 0, err + } + id = newLabel.ID + } + + labelIds[j] = id + labelMap[label] = id } - log.Info(). 
- Str("name", row.Item.Name). - Str("location", row.Location). - Msgf("Creating Item: %s", row.Item.Name) + // ======================================== + // Pre-Create Locations as necessary + path := serializeLocation(row.Location) - data := repo.ItemCreate{ - ImportRef: row.Item.ImportRef, - Name: row.Item.Name, - Description: row.Item.Description, - LabelIDs: labelIDs, - LocationID: locationID, + locationID, ok := locationMap[path] + if !ok { // Traverse the path of LocationStr and check each path element to see if it exists already, if not create it. + paths := []string{} + for i, pathElement := range row.Location { + paths = append(paths, pathElement) + path := serializeLocation(paths) + + locationID, ok = locationMap[path] + if !ok { + parentID := uuid.Nil + + // Get the parent ID + if i > 0 { + parentPath := serializeLocation(row.Location[:i]) + parentID = locationMap[parentPath] + } + + newLocation, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{ + ParentID: parentID, + Name: pathElement, + }) + if err != nil { + return 0, err + } + locationID = newLocation.ID + } + + locationMap[path] = locationID + } + + locationID, ok = locationMap[path] + if !ok { + return 0, errors.New("failed to create location") + } } - if svc.autoIncrementAssetID { - highest++ - data.AssetID = highest + var effAID repo.AssetID + if svc.autoIncrementAssetID && row.AssetID.Nil() { + effAID = highestAID + 1 + highestAID++ + } else { + effAID = row.AssetID } - result, err := svc.repo.Items.Create(ctx, GID, data) + // ======================================== + // Create Item + var item repo.ItemOut + switch { + case createRequired: + newItem := repo.ItemCreate{ + ImportRef: row.ImportRef, + Name: row.Name, + Description: row.Description, + AssetID: effAID, + LocationID: locationID, + LabelIDs: labelIds, + } - if err != nil { - return count, err + item, err = svc.repo.Items.Create(ctx, GID, newItem) + if err != nil { + return 0, err + } + default: + item, err = 
svc.repo.Items.GetByRef(ctx, GID, row.ImportRef) + if err != nil { + return 0, err + } } - // Update the item with the rest of the data - _, err = svc.repo.Items.UpdateByGroup(ctx, GID, repo.ItemUpdate{ - // Edges + if item.ID == uuid.Nil { + panic("item ID is nil on import - this should never happen") + } + + fields := make([]repo.ItemField, len(row.Fields)) + for i := range row.Fields { + fields[i] = repo.ItemField{ + Name: row.Fields[i].Name, + Type: "text", + TextValue: row.Fields[i].Value, + } + } + + updateItem := repo.ItemUpdate{ + ID: item.ID, + LabelIDs: labelIds, LocationID: locationID, - LabelIDs: labelIDs, - AssetID: data.AssetID, - // General Fields - ID: result.ID, - Name: result.Name, - Description: result.Description, - Insured: row.Item.Insured, - Notes: row.Item.Notes, - Quantity: row.Item.Quantity, + Name: row.Name, + Description: row.Description, + AssetID: effAID, + Insured: row.Insured, + Quantity: row.Quantity, + Archived: row.Archived, - // Identifies the item as imported - SerialNumber: row.Item.SerialNumber, - ModelNumber: row.Item.ModelNumber, - Manufacturer: row.Item.Manufacturer, + PurchasePrice: row.PurchasePrice, + PurchaseFrom: row.PurchaseFrom, + PurchaseTime: row.PurchaseTime, - // Purchase - PurchaseFrom: row.Item.PurchaseFrom, - PurchasePrice: row.Item.PurchasePrice, - PurchaseTime: row.Item.PurchaseTime, + Manufacturer: row.Manufacturer, + ModelNumber: row.ModelNumber, + SerialNumber: row.SerialNumber, - // Warranty - LifetimeWarranty: row.Item.LifetimeWarranty, - WarrantyExpires: row.Item.WarrantyExpires, - WarrantyDetails: row.Item.WarrantyDetails, + LifetimeWarranty: row.LifetimeWarranty, + WarrantyExpires: row.WarrantyExpires, + WarrantyDetails: row.WarrantyDetails, - SoldTo: row.Item.SoldTo, - SoldPrice: row.Item.SoldPrice, - SoldTime: row.Item.SoldTime, - SoldNotes: row.Item.SoldNotes, - }) + SoldTo: row.SoldTo, + SoldTime: row.SoldTime, + SoldPrice: row.SoldPrice, + SoldNotes: row.SoldNotes, - if err != nil { - return 
count, err + Notes: row.Notes, + Fields: fields, } - count++ + item, err = svc.repo.Items.UpdateByGroup(ctx, GID, updateItem) + if err != nil { + return 0, err + } + + finished++ } - return count, nil + + return finished, nil +} + +func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) { + items, err := svc.repo.Items.GetAll(ctx, GID) + if err != nil { + return nil, err + } + + sheet := reporting.IOSheet{} + + sheet.ReadItems(items) + + return sheet.TSV() +} + +func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) { + items, err := svc.repo.Items.GetAll(ctx, GID) + if err != nil { + return nil, err + } + + return reporting.BillOfMaterialsTSV(items) } diff --git a/backend/internal/core/services/service_items_csv.go b/backend/internal/core/services/service_items_csv.go deleted file mode 100644 index 2d93f6e..0000000 --- a/backend/internal/core/services/service_items_csv.go +++ /dev/null @@ -1,151 +0,0 @@ -package services - -import ( - "bytes" - "encoding/csv" - "errors" - "io" - "strconv" - "strings" - - "github.com/hay-kot/homebox/backend/internal/data/repo" - "github.com/hay-kot/homebox/backend/internal/data/types" -) - -func determineSeparator(data []byte) (rune, error) { - // First row - firstRow := bytes.Split(data, []byte("\n"))[0] - - // find first comma or /t - comma := bytes.IndexByte(firstRow, ',') - tab := bytes.IndexByte(firstRow, '\t') - - switch { - case comma == -1 && tab == -1: - return 0, errors.New("could not determine separator") - case tab > comma: - return '\t', nil - default: - return ',', nil - } -} - -func ReadCsv(r io.Reader) ([][]string, error) { - data, err := io.ReadAll(r) - if err != nil { - return nil, err - } - - reader := csv.NewReader(bytes.NewReader(data)) - - // Determine separator - sep, err := determineSeparator(data) - - if err != nil { - return nil, err - } - - reader.Comma = sep - - return reader.ReadAll() -} - -var ErrInvalidCsv = 
errors.New("invalid csv") - -const NumOfCols = 21 - -func parseFloat(s string) float64 { - if s == "" { - return 0 - } - f, _ := strconv.ParseFloat(s, 64) - return f -} - -func parseBool(s string) bool { - switch strings.ToLower(s) { - case "true", "yes", "1": - return true - default: - return false - } -} - -func parseInt(s string) int { - i, _ := strconv.Atoi(s) - return i -} - -type csvRow struct { - Item repo.ItemOut - Location string - LabelStr string -} - -func newCsvRow(row []string) csvRow { - - return csvRow{ - Location: row[1], - LabelStr: row[2], - Item: repo.ItemOut{ - ItemSummary: repo.ItemSummary{ - ImportRef: row[0], - Quantity: parseInt(row[3]), - Name: row[4], - Description: row[5], - Insured: parseBool(row[6]), - PurchasePrice: parseFloat(row[12]), - }, - SerialNumber: row[7], - ModelNumber: row[8], - Manufacturer: row[9], - Notes: row[10], - PurchaseFrom: row[11], - PurchaseTime: types.DateFromString(row[13]), - LifetimeWarranty: parseBool(row[14]), - WarrantyExpires: types.DateFromString(row[15]), - WarrantyDetails: row[16], - SoldTo: row[17], - SoldPrice: parseFloat(row[18]), - SoldTime: types.DateFromString(row[19]), - SoldNotes: row[20], - }, - } -} - -func (c csvRow) getLabels() []string { - split := strings.Split(c.LabelStr, ";") - - // Trim each - for i, s := range split { - split[i] = strings.TrimSpace(s) - } - - // Remove empty - for i, s := range split { - if s == "" { - split = append(split[:i], split[i+1:]...) 
- } - } - - return split -} - -func (c csvRow) validate() []error { - var errs []error - - add := func(err error) { - errs = append(errs, err) - } - - required := func(s string, name string) { - if s == "" { - add(errors.New(name + " is required")) - } - } - - required(c.Location, "Location") - required(c.Item.Name, "Name") - - return errs -} diff --git a/backend/internal/core/services/service_items_csv_test.go b/backend/internal/core/services/service_items_csv_test.go deleted file mode 100644 index af3056c..0000000 --- a/backend/internal/core/services/service_items_csv_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package services - -import ( - "bytes" - _ "embed" - "encoding/csv" - "fmt" - "reflect" - "testing" - "time" - - "github.com/stretchr/testify/assert" -) - -//go:embed .testdata/import.csv -var CSVData_Comma []byte - -//go:embed .testdata/import.tsv -var CSVData_Tab []byte - -func loadcsv() [][]string { - reader := csv.NewReader(bytes.NewReader(CSVData_Comma)) - - records, err := reader.ReadAll() - if err != nil { - panic(err) - } - - return records -} - -func Test_CorrectDateParsing(t *testing.T) { - t.Parallel() - - expected := []time.Time{ - time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC), - time.Date(2021, 10, 15, 0, 0, 0, 0, time.UTC), - time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC), - time.Date(2020, 10, 21, 0, 0, 0, 0, time.UTC), - time.Date(2020, 10, 14, 0, 0, 0, 0, time.UTC), - time.Date(2020, 9, 30, 0, 0, 0, 0, time.UTC), - } - - records := loadcsv() - - for i, record := range records { - if i == 0 { - continue - } - entity := newCsvRow(record) - expected := expected[i-1] - - assert.Equal(t, expected, entity.Item.PurchaseTime.Time(), fmt.Sprintf("Failed on row %d", i)) - assert.Equal(t, expected, entity.Item.WarrantyExpires.Time(), fmt.Sprintf("Failed on row %d", i)) - assert.Equal(t, expected, entity.Item.SoldTime.Time(), fmt.Sprintf("Failed on row %d", i)) - } -} - -func Test_csvRow_getLabels(t *testing.T) { - type fields struct { - LabelStr string - 
} - tests := []struct { - name string - fields fields - want []string - }{ - { - name: "basic test", - fields: fields{ - LabelStr: "IOT;Home Assistant;Z-Wave", - }, - want: []string{"IOT", "Home Assistant", "Z-Wave"}, - }, - { - name: "no labels", - fields: fields{ - LabelStr: "", - }, - want: []string{}, - }, - { - name: "single label", - fields: fields{ - LabelStr: "IOT", - }, - want: []string{"IOT"}, - }, - { - name: "trailing semicolon", - fields: fields{ - LabelStr: "IOT;", - }, - want: []string{"IOT"}, - }, - - { - name: "whitespace", - fields: fields{ - LabelStr: " IOT; Home Assistant; Z-Wave ", - }, - want: []string{"IOT", "Home Assistant", "Z-Wave"}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := csvRow{ - LabelStr: tt.fields.LabelStr, - } - if got := c.getLabels(); !reflect.DeepEqual(got, tt.want) { - t.Errorf("csvRow.getLabels() = %v, want %v", got, tt.want) - } - }) - } -} - -func Test_determineSeparator(t *testing.T) { - type args struct { - data []byte - } - tests := []struct { - name string - args args - want rune - wantErr bool - }{ - { - name: "comma", - args: args{ - data: CSVData_Comma, - }, - want: ',', - wantErr: false, - }, - { - name: "tab", - args: args{ - data: CSVData_Tab, - }, - want: '\t', - wantErr: false, - }, - { - name: "invalid", - args: args{ - data: []byte("a;b;c"), - }, - want: 0, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := determineSeparator(tt.args.data) - if (err != nil) != tt.wantErr { - t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr) - return - } - if got != tt.want { - t.Errorf("determineSeparator() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/backend/internal/core/services/service_items_test.go b/backend/internal/core/services/service_items_test.go deleted file mode 100644 index 105c842..0000000 --- a/backend/internal/core/services/service_items_test.go +++ /dev/null @@ -1,78 +0,0 @@ 
-package services - -import ( - "context" - "testing" - - "github.com/google/uuid" - "github.com/hay-kot/homebox/backend/internal/data/repo" - "github.com/stretchr/testify/assert" -) - -func TestItemService_CsvImport(t *testing.T) { - data := loadcsv() - svc := &ItemService{ - repo: tRepos, - } - count, err := svc.CsvImport(context.Background(), tGroup.ID, data) - assert.Equal(t, 6, count) - assert.NoError(t, err) - - // Check import refs are deduplicated - count, err = svc.CsvImport(context.Background(), tGroup.ID, data) - assert.Equal(t, 0, count) - assert.NoError(t, err) - - items, err := svc.repo.Items.GetAll(context.Background(), tGroup.ID) - assert.NoError(t, err) - t.Cleanup(func() { - for _, item := range items { - err := svc.repo.Items.Delete(context.Background(), item.ID) - assert.NoError(t, err) - } - }) - - assert.Equal(t, len(items), 6) - - dataCsv := []csvRow{} - for _, item := range data { - dataCsv = append(dataCsv, newCsvRow(item)) - } - - allLocation, err := tRepos.Locations.GetAll(context.Background(), tGroup.ID, repo.LocationQuery{}) - assert.NoError(t, err) - locNames := []string{} - for _, loc := range allLocation { - locNames = append(locNames, loc.Name) - } - - allLabels, err := tRepos.Labels.GetAll(context.Background(), tGroup.ID) - assert.NoError(t, err) - labelNames := []string{} - for _, label := range allLabels { - labelNames = append(labelNames, label.Name) - } - - ids := []uuid.UUID{} - t.Cleanup((func() { - for _, id := range ids { - err := svc.repo.Items.Delete(context.Background(), id) - assert.NoError(t, err) - } - })) - - for _, item := range items { - assert.Contains(t, locNames, item.Location.Name) - for _, label := range item.Labels { - assert.Contains(t, labelNames, label.Name) - } - - for _, csvRow := range dataCsv { - if csvRow.Item.Name == item.Name { - assert.Equal(t, csvRow.Item.Description, item.Description) - assert.Equal(t, csvRow.Item.Quantity, item.Quantity) - assert.Equal(t, csvRow.Item.Insured, item.Insured) - } - 
} - } -} diff --git a/backend/internal/data/ent/item_update.go b/backend/internal/data/ent/item_update.go index 88796e4..fa988df 100644 --- a/backend/internal/data/ent/item_update.go +++ b/backend/internal/data/ent/item_update.go @@ -67,6 +67,26 @@ func (iu *ItemUpdate) ClearDescription() *ItemUpdate { return iu } +// SetImportRef sets the "import_ref" field. +func (iu *ItemUpdate) SetImportRef(s string) *ItemUpdate { + iu.mutation.SetImportRef(s) + return iu +} + +// SetNillableImportRef sets the "import_ref" field if the given value is not nil. +func (iu *ItemUpdate) SetNillableImportRef(s *string) *ItemUpdate { + if s != nil { + iu.SetImportRef(*s) + } + return iu +} + +// ClearImportRef clears the value of the "import_ref" field. +func (iu *ItemUpdate) ClearImportRef() *ItemUpdate { + iu.mutation.ClearImportRef() + return iu +} + // SetNotes sets the "notes" field. func (iu *ItemUpdate) SetNotes(s string) *ItemUpdate { iu.mutation.SetNotes(s) @@ -713,6 +733,11 @@ func (iu *ItemUpdate) check() error { return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)} } } + if v, ok := iu.mutation.ImportRef(); ok { + if err := item.ImportRefValidator(v); err != nil { + return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)} + } + } if v, ok := iu.mutation.Notes(); ok { if err := item.NotesValidator(v); err != nil { return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)} @@ -773,6 +798,9 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) { if iu.mutation.DescriptionCleared() { _spec.ClearField(item.FieldDescription, field.TypeString) } + if value, ok := iu.mutation.ImportRef(); ok { + _spec.SetField(item.FieldImportRef, field.TypeString, value) + } if iu.mutation.ImportRefCleared() { _spec.ClearField(item.FieldImportRef, field.TypeString) } @@ -1302,6 +1330,26 @@ 
func (iuo *ItemUpdateOne) ClearDescription() *ItemUpdateOne { return iuo } +// SetImportRef sets the "import_ref" field. +func (iuo *ItemUpdateOne) SetImportRef(s string) *ItemUpdateOne { + iuo.mutation.SetImportRef(s) + return iuo +} + +// SetNillableImportRef sets the "import_ref" field if the given value is not nil. +func (iuo *ItemUpdateOne) SetNillableImportRef(s *string) *ItemUpdateOne { + if s != nil { + iuo.SetImportRef(*s) + } + return iuo +} + +// ClearImportRef clears the value of the "import_ref" field. +func (iuo *ItemUpdateOne) ClearImportRef() *ItemUpdateOne { + iuo.mutation.ClearImportRef() + return iuo +} + // SetNotes sets the "notes" field. func (iuo *ItemUpdateOne) SetNotes(s string) *ItemUpdateOne { iuo.mutation.SetNotes(s) @@ -1961,6 +2009,11 @@ func (iuo *ItemUpdateOne) check() error { return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)} } } + if v, ok := iuo.mutation.ImportRef(); ok { + if err := item.ImportRefValidator(v); err != nil { + return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)} + } + } if v, ok := iuo.mutation.Notes(); ok { if err := item.NotesValidator(v); err != nil { return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)} @@ -2038,6 +2091,9 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error) if iuo.mutation.DescriptionCleared() { _spec.ClearField(item.FieldDescription, field.TypeString) } + if value, ok := iuo.mutation.ImportRef(); ok { + _spec.SetField(item.FieldImportRef, field.TypeString, value) + } if iuo.mutation.ImportRefCleared() { _spec.ClearField(item.FieldImportRef, field.TypeString) } diff --git a/backend/internal/data/ent/schema/item.go b/backend/internal/data/ent/schema/item.go index 5180f27..6efed21 100644 --- a/backend/internal/data/ent/schema/item.go +++ 
b/backend/internal/data/ent/schema/item.go @@ -38,8 +38,7 @@ func (Item) Fields() []ent.Field { return []ent.Field{ field.String("import_ref"). Optional(). - MaxLen(100). - Immutable(), + MaxLen(100), field.String("notes"). MaxLen(1000). Optional(), diff --git a/backend/internal/data/repo/asset_id_type.go b/backend/internal/data/repo/asset_id_type.go index 06d610e..678a510 100644 --- a/backend/internal/data/repo/asset_id_type.go +++ b/backend/internal/data/repo/asset_id_type.go @@ -32,10 +32,18 @@ func ParseAssetID(s string) (AID AssetID, ok bool) { return ParseAssetIDBytes([]byte(s)) } -func (aid AssetID) MarshalJSON() ([]byte, error) { +func (aid AssetID) String() string { + if aid.Nil() { + return "" + } + aidStr := fmt.Sprintf("%06d", aid) aidStr = fmt.Sprintf("%s-%s", aidStr[:3], aidStr[3:]) - return []byte(fmt.Sprintf(`"%s"`, aidStr)), nil + return aidStr +} + +func (aid AssetID) MarshalJSON() ([]byte, error) { + return []byte(`"` + aid.String() + `"`), nil } func (aid *AssetID) UnmarshalJSON(d []byte) error { @@ -50,3 +58,11 @@ func (aid *AssetID) UnmarshalJSON(d []byte) error { *aid = AssetID(aidInt) return nil } + +func (aid AssetID) MarshalCSV() (string, error) { + return aid.String(), nil +} + +func (aid *AssetID) UnmarshalCSV(d string) error { + return aid.UnmarshalJSON([]byte(d)) +} diff --git a/backend/internal/data/repo/asset_id_type_test.go b/backend/internal/data/repo/asset_id_type_test.go index 6a692d9..6aa7b99 100644 --- a/backend/internal/data/repo/asset_id_type_test.go +++ b/backend/internal/data/repo/asset_id_type_test.go @@ -21,7 +21,7 @@ func TestAssetID_MarshalJSON(t *testing.T) { { name: "zero test", aid: 0, - want: []byte(`"000-000"`), + want: []byte(`""`), }, { name: "large int", diff --git a/backend/internal/data/repo/repo_items.go b/backend/internal/data/repo/repo_items.go index 5e73565..69434b8 100644 --- a/backend/internal/data/repo/repo_items.go +++ b/backend/internal/data/repo/repo_items.go @@ -59,6 +59,7 @@ type ( LocationID 
uuid.UUID `json:"locationId"` LabelIDs []uuid.UUID `json:"labelIds"` } + ItemUpdate struct { ParentID uuid.UUID `json:"parentId" extensions:"x-nullable,x-omitempty"` ID uuid.UUID `json:"id"` @@ -99,6 +100,12 @@ type ( Fields []ItemField `json:"fields"` } + ItemPatch struct { + ID uuid.UUID `json:"id"` + Quantity *int `json:"quantity,omitempty" extensions:"x-nullable,x-omitempty"` + ImportRef *string `json:"importRef,omitempty" extensions:"x-nullable,x-omitempty"` + } + ItemSummary struct { ImportRef string `json:"-"` ID uuid.UUID `json:"id"` @@ -168,6 +175,7 @@ func mapItemSummary(item *ent.Item) ItemSummary { ID: item.ID, Name: item.Name, Description: item.Description, + ImportRef: item.ImportRef, Quantity: item.Quantity, CreatedAt: item.CreatedAt, UpdatedAt: item.UpdatedAt, @@ -285,6 +293,10 @@ func (e *ItemsRepository) CheckRef(ctx context.Context, GID uuid.UUID, ref strin return q.Where(item.ImportRef(ref)).Exist(ctx) } +func (e *ItemsRepository) GetByRef(ctx context.Context, GID uuid.UUID, ref string) (ItemOut, error) { + return e.getOne(ctx, item.ImportRef(ref), item.HasGroupWith(group.ID(GID))) +} + // GetOneByGroup returns a single item by ID. If the item does not exist, an error is returned. // GetOneByGroup ensures that the item belongs to a specific group. func (e *ItemsRepository) GetOneByGroup(ctx context.Context, gid, id uuid.UUID) (ItemOut, error) { @@ -628,6 +640,44 @@ func (e *ItemsRepository) UpdateByGroup(ctx context.Context, GID uuid.UUID, data return e.GetOne(ctx, data.ID) } +func (e *ItemsRepository) GetAllZeroImportRef(ctx context.Context, GID uuid.UUID) ([]uuid.UUID, error) { + var ids []uuid.UUID + + err := e.db.Item.Query(). + Where( + item.HasGroupWith(group.ID(GID)), + item.Or( + item.ImportRefEQ(""), + item.ImportRefIsNil(), + ), + ). + Select(item.FieldID). 
+ Scan(ctx, &ids) + if err != nil { + return nil, err + } + + return ids, nil +} + +func (e *ItemsRepository) Patch(ctx context.Context, GID, ID uuid.UUID, data ItemPatch) error { + q := e.db.Item.Update(). + Where( + item.ID(ID), + item.HasGroupWith(group.ID(GID)), + ) + + if data.ImportRef != nil { + q.SetImportRef(*data.ImportRef) + } + + if data.Quantity != nil { + q.SetQuantity(*data.Quantity) + } + + return q.Exec(ctx) +} + func (e *ItemsRepository) GetAllCustomFieldValues(ctx context.Context, GID uuid.UUID, name string) ([]string, error) { type st struct { Value string `json:"text_value"` diff --git a/backend/internal/data/repo/repo_maintenance_entry_test.go b/backend/internal/data/repo/repo_maintenance_entry_test.go index bc9f6af..aafb08e 100644 --- a/backend/internal/data/repo/repo_maintenance_entry_test.go +++ b/backend/internal/data/repo/repo_maintenance_entry_test.go @@ -16,9 +16,7 @@ func getPrevMonth(now time.Time) time.Time { // avoid infinite loop max := 15 for t.Month() == now.Month() { - println("month is the same") t = t.AddDate(0, 0, -1) - println(t.String()) max-- if max == 0 { diff --git a/backend/internal/data/repo/repo_users_test.go b/backend/internal/data/repo/repo_users_test.go index 31d2737..d3cd361 100644 --- a/backend/internal/data/repo/repo_users_test.go +++ b/backend/internal/data/repo/repo_users_test.go @@ -2,7 +2,6 @@ package repo import ( "context" - "fmt" "testing" "github.com/stretchr/testify/assert" @@ -81,7 +80,6 @@ func TestUserRepo_GetAll(t *testing.T) { assert.Equal(t, len(created), len(allUsers)) for _, usr := range created { - fmt.Printf("%+v\n", usr) for _, usr2 := range allUsers { if usr.ID == usr2.ID { assert.Equal(t, usr.Email, usr2.Email) diff --git a/backend/internal/data/types/date.go b/backend/internal/data/types/date.go index 0dc09db..1b8b182 100644 --- a/backend/internal/data/types/date.go +++ b/backend/internal/data/types/date.go @@ -2,7 +2,6 @@ package types import ( "errors" - "fmt" "strings" "time" ) @@ -74,9 
+73,7 @@ func (d Date) MarshalJSON() ([]byte, error) { func (d *Date) UnmarshalJSON(data []byte) (err error) { // unescape the string if necessary `\"` -> `"` str := strings.Trim(string(data), "\"") - fmt.Printf("str: %q\n", str) if str == "" || str == "null" || str == `""` { - println("empty date") *d = Date{} return nil } diff --git a/docs/docs/import-csv.md b/docs/docs/import-csv.md index c708e68..5dffc38 100644 --- a/docs/docs/import-csv.md +++ b/docs/docs/import-csv.md @@ -4,56 +4,80 @@ Using the CSV import is the recommended way for adding items to the database. It is always going to be the fastest way to import any large amount of items and provides the most flexibility when it comes to adding items. -**Limitations** +**Current Limitations** - - Currently only supports importing items, locations, and labels - - Does not support attachments. Attachments must be uploaded after import + - Imports only supports importing items, locations, and labels + - Imports and Exports do not support attachments. Attachments must be uploaded after import + - CSV Exports do not support nested path exports (e.g. `Home / Office / Desk`) and will only export the Items direct parent, (though imports _do_ support nested paths) + - Cannot specify item-to-item relationships (e.g. `Item A` is a child of `Item B`) !!! tip "File Formats" The CSV import supports both CSV and TSV files. The only difference is the delimiter used. CSV files use a comma `,` as the delimiter and TSV files use a tab `\t` as the delimiter. The file extension does not matter. -**Template** - -You can use this snippet as the headers for your CSV. Copy and paste it into your spreadsheet editor of choice and fill in the value. - -```csv -ImportRef Location Labels Quantity Name Description Insured Serial Number Model Number Manufacturer Notes Purchase From Purchased Price Purchased Time Lifetime Warranty Warranty Expires Warranty Details Sold To Sold Price Sold Time Sold Notes -``` - -!!! 
tip "Column Order" - Column headers are just there for reference, the important thing is that the order is correct. You can change the headers to anything you like, this behavior may change in the future. - - ## CSV Reference -| Column | Type | Description | -| ----------------- | -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| ImportRef | String (100) | Import Refs are unique strings that can be used to deduplicate imports. Before an item is imported, we check the database for a matching ref. If the ref exists, we skip that item. | -| Location | String | This is the location of the item that will be created. These are de-duplicated and won't create another instance when reused. | -| Labels | `;` Separated String | List of labels to apply to the item separated by a `;`, can be existing or new | -| Quantity | Integer | The quantity of items to create | -| Name | String | Name of the item | -| Description | String | Description of the item | -| Insured | Boolean | Whether or not the item is insured | -| Serial Number | String | Serial number of the item | -| Model Number | String | Model of the item | -| Manufacturer | String | Manufacturer of the item | -| Notes | String (1000) | General notes about the product | -| Purchase From | String | Name of the place the item was purchased from | -| Purchase Price | Float64 | | -| Purchase At | Date | Date the item was purchased | -| Lifetime Warranty | Boolean | true or false - case insensitive | -| Warranty Expires | Date | Date in the format | -| Warranty Details | String | Details about the warranty | -| Sold To | String | Name of the person the item was sold to | -| Sold At | Date | Date the item was sold | -| Sold Price | Float64 | | -| Sold Notes | String (1000) | | +Below are the supported columns. 
They are case sensitive, can be in any order, or can be omitted unless otherwise specified. + +### Special Syntax Columns + +`HB.import_ref` + +: Import Refs are unique strings that can be used to deduplicate imports. Before an item is imported, we check the database for a matching ref. If the ref exists, we skip creation of that item. + + * String Type + * Max 100 Characters + + Import Refs are used to de-duplicate imports. It is HIGHLY recommended that you use them to manage your items if you intend to manage your inventory via CSV import/export. If you do not use import refs, you will end up with duplicate items in your database on subsequent imports. + + !!! tip + + Specifying import refs also allows you to update existing items via the CSV import. If you specify an import ref that already exists in the database, we will update the existing item instead of creating a new one. + +`HB.location` + +: This is the location of the item that will be created. These are de-duplicated and won't create another instance when reused. + + * Supports Path Separators for nested locations (e.g. `Home / Office / Desk`) + +`HB.labels` + +: List of labels to apply to the item, separated by a `;`; can be existing or new labels. + +`HB.field.{field_name}` (e.g. `HB.field.Serial Number`) + +: This is a special column that allows you to add custom fields to the item. The column name must start with `HB.field.` followed by the name of the field. The value of the column will be the value of the field. + + - If the cell value is empty, it will be ignored. 
+ +### Standard Columns + +| Column | Type | Description | +| -------------------- | ------------- | --------------------------------------------- | +| HB.quantity | Integer | The quantity of items to create | +| HB.name | String | Name of the item | +| HB.asset_id | AssetID | Asset ID for the item | +| HB.description | String | Description of the item | +| HB.insured | Boolean | Whether or not the item is insured | +| HB.serial_number | String | Serial number of the item | +| HB.model_number | String | Model of the item | +| HB.manufacturer | String | Manufacturer of the item | +| HB.notes | String (1000) | General notes about the product | +| HB.purchase_from | String | Name of the place the item was purchased from | +| HB.purchase_price | Float64 | | +| HB.purchase_at | Date | Date the item was purchased | +| HB.lifetime_warranty | Boolean | true or false - case insensitive | +| HB.warranty_expires | Date | Date in the format | +| HB.warranty_details | String | Details about the warranty | +| HB.sold_to | String | Name of the person the item was sold to | +| HB.sold_at | Date | Date the item was sold | +| HB.sold_price | Float64 | | +| HB.sold_notes | String (1000) | | **Type Key** | Type | Format | | ------- | --------------------------------------------------- | | String | Max 255 Characters unless otherwise specified | -| Date | MM/DD/YYYY | +| Date | YYYY-MM-DD | | Boolean | true or false, yes or no, 1 or 0 - case insensitive | +| AssetID | 000-000 | diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 1b7dda5..9033bb4 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -44,12 +44,11 @@ markdown_extensions: custom_checkbox: true - admonition - attr_list - - pymdownx.tabbed - pymdownx.superfences nav: - Home: index.md - Quick Start: quick-start.md - Tips and Tricks: tips-tricks.md - - Importing Data: import-csv.md + - Import and Export: import-csv.md - Building The Binary: build.md diff --git a/frontend/components/App/ImportDialog.vue 
b/frontend/components/App/ImportDialog.vue index 4d225a2..adfe993 100644 --- a/frontend/components/App/ImportDialog.vue +++ b/frontend/components/App/ImportDialog.vue @@ -5,6 +5,27 @@ Import a CSV file containing your items, labels, and locations. See documentation for more information on the required format.

+
+
+ + + + + Behavior for imports with existing import_refs has changed. If an import_ref is present in the CSV file, the + item will be updated with the values in the CSV file. + +
+
diff --git a/frontend/lib/api/__test__/user/stats.test.ts b/frontend/lib/api/__test__/user/stats.test.ts index 719f14f..a13a03d 100644 --- a/frontend/lib/api/__test__/user/stats.test.ts +++ b/frontend/lib/api/__test__/user/stats.test.ts @@ -4,27 +4,27 @@ import { UserClient } from "../../user"; import { factories } from "../factories"; type ImportObj = { - ImportRef: string; - Location: string; - Labels: string; - Quantity: string; - Name: string; - Description: string; - Insured: boolean; - SerialNumber: string; - ModelNumber: string; - Manufacturer: string; - Notes: string; - PurchaseFrom: string; - PurchasedPrice: number; - PurchasedTime: string; - LifetimeWarranty: boolean; - WarrantyExpires: string; - WarrantyDetails: string; - SoldTo: string; - SoldPrice: number; - SoldTime: string; - SoldNotes: string; + [`HB.import_ref`]: string; + [`HB.location`]: string; + [`HB.labels`]: string; + [`HB.quantity`]: number; + [`HB.name`]: string; + [`HB.description`]: string; + [`HB.insured`]: boolean; + [`HB.serial_number`]: string; + [`HB.model_number`]: string; + [`HB.manufacturer`]: string; + [`HB.notes`]: string; + [`HB.purchase_price`]: number; + [`HB.purchase_from`]: string; + [`HB.purchase_time`]: string; + [`HB.lifetime_warranty`]: boolean; + [`HB.warranty_expires`]: string; + [`HB.warranty_details`]: string; + [`HB.sold_to`]: string; + [`HB.sold_price`]: number; + [`HB.sold_time`]: string; + [`HB.sold_notes`]: string; }; function toCsv(data: ImportObj[]): string { @@ -36,7 +36,7 @@ function toCsv(data: ImportObj[]): string { } function importFileGenerator(entries: number): ImportObj[] { - const imports: ImportObj[] = []; + const imports: Partial[] = []; const pick = (arr: string[]) => arr[Math.floor(Math.random() * arr.length)]; @@ -45,37 +45,41 @@ function importFileGenerator(entries: number): ImportObj[] { const half = Math.floor(entries / 2); + // YYYY-MM-DD + const formatDate = (date: Date) => date.toISOString().split("T")[0]; + for (let i = 0; i < entries; 
i++) { imports.push({ - ImportRef: faker.database.mongodbObjectId(), - Location: pick(locations), - Labels: labels, - Quantity: faker.random.numeric(1), - Name: faker.random.words(3), - Description: "", - Insured: faker.datatype.boolean(), - SerialNumber: faker.random.alphaNumeric(5), - ModelNumber: faker.random.alphaNumeric(5), - Manufacturer: faker.random.alphaNumeric(5), - Notes: "", - PurchaseFrom: faker.name.fullName(), - PurchasedPrice: faker.datatype.number(100), - PurchasedTime: faker.date.past().toDateString(), - LifetimeWarranty: half > i, - WarrantyExpires: faker.date.future().toDateString(), - WarrantyDetails: "", - SoldTo: faker.name.fullName(), - SoldPrice: faker.datatype.number(100), - SoldTime: faker.date.past().toDateString(), - SoldNotes: "", + [`HB.import_ref`]: faker.database.mongodbObjectId(), + [`HB.location`]: pick(locations), + [`HB.labels`]: labels, + [`HB.quantity`]: Number(faker.random.numeric(2)), + [`HB.name`]: faker.random.words(3), + [`HB.description`]: "", + [`HB.insured`]: faker.datatype.boolean(), + [`HB.serial_number`]: faker.random.alphaNumeric(5), + [`HB.model_number`]: faker.random.alphaNumeric(5), + [`HB.manufacturer`]: faker.random.alphaNumeric(5), + [`HB.notes`]: "", + [`HB.purchase_from`]: faker.name.fullName(), + [`HB.purchase_price`]: faker.datatype.number(100), + [`HB.purchase_time`]: faker.date.past().toDateString(), + [`HB.lifetime_warranty`]: half > i, + [`HB.warranty_details`]: "", + [`HB.sold_to`]: faker.name.fullName(), + [`HB.sold_price`]: faker.datatype.number(100), + [`HB.sold_time`]: formatDate(faker.date.past()), + [`HB.sold_notes`]: "", }); } - return imports; + return imports as ImportObj[]; } describe("group related statistics tests", () => { const TOTAL_ITEMS = 30; + const labelData: Record = {}; + const locationData: Record = {}; let tAPI: UserClient | undefined; const imports = importFileGenerator(TOTAL_ITEMS); @@ -97,10 +101,26 @@ describe("group related statistics tests", () => { const setupResp = 
await client.items.import(new Blob([csv], { type: "text/csv" })); expect(setupResp.status).toBe(204); + + for (const item of imports) { + const labels = item[`HB.labels`].split(";"); + for (const label of labels) { + if (labelData[label]) { + labelData[label] += item[`HB.purchase_price`]; + } else { + labelData[label] = item[`HB.purchase_price`]; + } + } + + const location = item[`HB.location`]; + if (locationData[location]) { + locationData[location] += item[`HB.purchase_price`]; + } else { + locationData[location] = item[`HB.purchase_price`]; + } + } }); - // Write to file system for debugging - // fs.writeFileSync("test.csv", csv); test("Validate Group Statistics", async () => { const { status, data } = await api().stats.group(); expect(status).toBe(200); @@ -112,17 +132,6 @@ describe("group related statistics tests", () => { expect(data.totalWithWarranty).toEqual(Math.floor(TOTAL_ITEMS / 2)); }); - const labelData: Record = {}; - const locationData: Record = {}; - - for (const item of imports) { - for (const label of item.Labels.split(";")) { - labelData[label] = (labelData[label] || 0) + item.PurchasedPrice; - } - - locationData[item.Location] = (locationData[item.Location] || 0) + item.PurchasedPrice; - } - test("Validate Labels Statistics", async () => { const { status, data } = await api().stats.labels(); expect(status).toBe(200); diff --git a/frontend/lib/api/classes/actions.ts b/frontend/lib/api/classes/actions.ts index a65e059..f30e332 100644 --- a/frontend/lib/api/classes/actions.ts +++ b/frontend/lib/api/classes/actions.ts @@ -13,4 +13,10 @@ export class ActionsAPI extends BaseAPI { url: route("/actions/zero-item-time-fields"), }); } + + ensureImportRefs() { + return this.http.post({ + url: route("/actions/ensure-import-refs"), + }); + } } diff --git a/frontend/pages/tools.vue b/frontend/pages/tools.vue index 8606e9b..7612659 100644 --- a/frontend/pages/tools.vue +++ b/frontend/pages/tools.vue @@ -45,10 +45,10 @@ Imports the standard CSV format for 
Homebox. This will not overwrite any existing items in your inventory. It will only add new items. - +
@@ -68,6 +68,11 @@ current asset_id field in the database and applying the next value to each item that has an unset asset_id field. This is done in order of the created_at field. + + + Ensures that all items in your inventory have a valid import_ref field. This is done by randomly generating + a 8 character string for each item that has an unset import_ref field. + Resets the time value for all date time fields in your inventory to the beginning of the date. This is to @@ -103,7 +108,13 @@ const notify = useNotifier(); function getBillOfMaterials() { - api.reports.billOfMaterialsURL(); + const url = api.reports.billOfMaterialsURL(); + window.open(url, "_blank"); + } + + function getExportTSV() { + const url = api.items.exportURL(); + window.open(url, "_blank"); } async function ensureAssetIDs() { @@ -125,6 +136,25 @@ notify.success(`${result.data.completed} assets have been updated.`); } + async function ensureImportRefs() { + const { isCanceled } = await confirm.open( + "Are you sure you want to ensure all assets have an import_ref? This can take a while and cannot be undone." + ); + + if (isCanceled) { + return; + } + + const result = await api.actions.ensureImportRefs(); + + if (result.error) { + notify.error("Failed to ensure import refs."); + return; + } + + notify.success(`${result.data.completed} assets have been updated.`); + } + async function resetItemDateTimes() { const { isCanceled } = await confirm.open( "Are you sure you want to reset all date and time values? This can take a while and cannot be undone."