Mirror of https://github.com/hay-kot/homebox.git (synced 2024-12-18 13:06:32 +00:00)
feat: import export rewrite (#290)
* WIP: initial work
* refactoring
* fix failing JS tests
* update import docs
* fix import headers
* fix column headers
* update refs on import
* remove demo status
* finnnneeeee
* formatting
parent a005fa5b9b
commit a6bcb36c5b
41 changed files with 1616 additions and 796 deletions
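
The core of this change is the import/export header scheme: the fixed human-readable columns (Import Ref, Location, Labels, ...) give way to prefixed HB.* headers (HB.import_ref, HB.location, HB.labels, ...) handled by a new reporting package. For orientation before the diff, this is a minimal import file under the new scheme, taken verbatim from the test fixture added in this commit:

    HB.location,HB.name,HB.quantity,HB.description
    loc,Item 1,1,Description 1
    loc,Item 2,2,Description 2
    loc,Item 3,3,Description 3

As the new parseHeaders and determineSeparator functions below enforce, only HB.location and HB.name are required; custom fields ride along as HB.field.<name> columns, and comma- vs. tab-separated input is detected automatically.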
@@ -2,7 +2,6 @@ package main
 
 import (
     "context"
-    "encoding/csv"
     "strings"
 
     "github.com/hay-kot/homebox/backend/internal/core/services"
@@ -10,7 +9,7 @@ import (
 )
 
 func (a *app) SetupDemo() {
-    csvText := `Import Ref,Location,Labels,Quantity,Name,Description,Insured,Serial Number,Model Number,Manufacturer,Notes,Purchase From,Purchased Price,Purchased Time,Lifetime Warranty,Warranty Expires,Warranty Details,Sold To,Sold Price,Sold Time,Sold Notes
+    csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes
 ,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
 ,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,,
 ,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
@@ -19,13 +18,11 @@ func (a *app) SetupDemo() {
 ,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
 `
 
-    var (
-        registration = services.UserRegistration{
+    registration := services.UserRegistration{
         Email:    "demo@example.com",
         Name:     "Demo",
         Password: "demo",
     }
-    )
 
     // First check if we've already setup a demo user and skip if so
     _, err := a.services.User.Login(context.Background(), registration.Email, registration.Password)
@@ -42,17 +39,7 @@ func (a *app) SetupDemo() {
     token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password)
     self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
 
-    // Read CSV Text
-    reader := csv.NewReader(strings.NewReader(csvText))
-    reader.Comma = ','
-
-    records, err := reader.ReadAll()
-    if err != nil {
-        log.Err(err).Msg("Failed to read CSV")
-        log.Fatal().Msg("Failed to setup demo")
-    }
-
-    _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, records)
+    _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText))
     if err != nil {
         log.Err(err).Msg("Failed to import CSV")
         log.Fatal().Msg("Failed to setup demo")

@@ -1,8 +1,10 @@
 package v1
 
 import (
+    "context"
     "net/http"
 
+    "github.com/google/uuid"
     "github.com/hay-kot/homebox/backend/internal/core/services"
     "github.com/hay-kot/homebox/backend/internal/sys/validate"
     "github.com/hay-kot/homebox/backend/pkgs/server"
@@ -13,6 +15,20 @@ type ActionAmountResult struct {
     Completed int `json:"completed"`
 }
 
+func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int, error)) server.HandlerFunc {
+    return func(w http.ResponseWriter, r *http.Request) error {
+        ctx := services.NewContext(r.Context())
+
+        totalCompleted, err := fn(ctx, ctx.GID)
+        if err != nil {
+            log.Err(err).Str("action_ref", ref).Msg("failed to run action")
+            return validate.NewRequestError(err, http.StatusInternalServerError)
+        }
+
+        return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
+    }
+}
+
 // HandleGroupInvitationsCreate godoc
 // @Summary Ensures all items in the database have an asset id
 // @Tags Group
@@ -21,17 +37,18 @@ type ActionAmountResult struct {
 // @Router /v1/actions/ensure-asset-ids [Post]
 // @Security Bearer
 func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
-    return func(w http.ResponseWriter, r *http.Request) error {
-        ctx := services.NewContext(r.Context())
-
-        totalCompleted, err := ctrl.svc.Items.EnsureAssetID(ctx, ctx.GID)
-        if err != nil {
-            log.Err(err).Msg("failed to ensure asset id")
-            return validate.NewRequestError(err, http.StatusInternalServerError)
-        }
-
-        return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
-    }
+    return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
+}
+
+// HandleEnsureImportRefs godoc
+// @Summary Ensures all items in the database have an import ref
+// @Tags Group
+// @Produce json
+// @Success 200 {object} ActionAmountResult
+// @Router /v1/actions/ensure-import-refs [Post]
+// @Security Bearer
+func (ctrl *V1Controller) HandleEnsureImportRefs() server.HandlerFunc {
+    return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
 }
 
 // HandleItemDateZeroOut godoc
@@ -42,15 +59,5 @@ func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
 // @Router /v1/actions/zero-item-time-fields [Post]
 // @Security Bearer
 func (ctrl *V1Controller) HandleItemDateZeroOut() server.HandlerFunc {
-    return func(w http.ResponseWriter, r *http.Request) error {
-        ctx := services.NewContext(r.Context())
-
-        totalCompleted, err := ctrl.repo.Items.ZeroOutTimeFields(ctx, ctx.GID)
-        if err != nil {
-            log.Err(err).Msg("failed to ensure asset id")
-            return validate.NewRequestError(err, http.StatusInternalServerError)
-        }
-
-        return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
-    }
+    return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
 }

@@ -2,6 +2,7 @@ package v1
 
 import (
     "database/sql"
+    "encoding/csv"
    "errors"
     "net/http"
     "strings"
@@ -255,15 +256,9 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
             return validate.NewRequestError(err, http.StatusInternalServerError)
         }
 
-        data, err := services.ReadCsv(file)
-        if err != nil {
-            log.Err(err).Msg("failed to read csv")
-            return validate.NewRequestError(err, http.StatusInternalServerError)
-        }
-
         user := services.UseUserCtx(r.Context())
 
-        _, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, data)
+        _, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, file)
         if err != nil {
             log.Err(err).Msg("failed to import items")
             return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -272,3 +267,26 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
         return server.Respond(w, http.StatusNoContent, nil)
     }
 }
+
+// HandleItemsImport godocs
+// @Summary exports items into the database
+// @Tags Items
+// @Success 200 {string} string "text/csv"
+// @Router /v1/items/export [GET]
+// @Security Bearer
+func (ctrl *V1Controller) HandleItemsExport() server.HandlerFunc {
+    return func(w http.ResponseWriter, r *http.Request) error {
+        ctx := services.NewContext(r.Context())
+
+        csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
+        if err != nil {
+            log.Err(err).Msg("failed to export items")
+            return validate.NewRequestError(err, http.StatusInternalServerError)
+        }
+
+        w.Header().Set("Content-Type", "text/tsv")
+        w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
+        writer := csv.NewWriter(w)
+        return writer.WriteAll(csvData)
+    }
+}

@@ -19,13 +19,13 @@ func (ctrl *V1Controller) HandleBillOfMaterialsExport() server.HandlerFunc {
     return func(w http.ResponseWriter, r *http.Request) error {
         actor := services.UseUserCtx(r.Context())
 
-        csv, err := ctrl.svc.Reporting.BillOfMaterialsTSV(r.Context(), actor.GroupID)
+        csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
         if err != nil {
             return err
         }
 
-        w.Header().Set("Content-Type", "text/csv")
-        w.Header().Set("Content-Disposition", "attachment; filename=bom.csv")
+        w.Header().Set("Content-Type", "text/tsv")
+        w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
         _, err = w.Write(csv)
         return err
     }

@@ -89,6 +89,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
 
     a.server.Post(v1Base("/actions/ensure-asset-ids"), v1Ctrl.HandleEnsureAssetID(), userMW...)
     a.server.Post(v1Base("/actions/zero-item-time-fields"), v1Ctrl.HandleItemDateZeroOut(), userMW...)
+    a.server.Post(v1Base("/actions/ensure-import-refs"), v1Ctrl.HandleEnsureImportRefs(), userMW...)
 
     a.server.Get(v1Base("/locations"), v1Ctrl.HandleLocationGetAll(), userMW...)
     a.server.Post(v1Base("/locations"), v1Ctrl.HandleLocationCreate(), userMW...)
@@ -106,6 +107,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
     a.server.Get(v1Base("/items"), v1Ctrl.HandleItemsGetAll(), userMW...)
     a.server.Post(v1Base("/items"), v1Ctrl.HandleItemsCreate(), userMW...)
     a.server.Post(v1Base("/items/import"), v1Ctrl.HandleItemsImport(), userMW...)
+    a.server.Get(v1Base("/items/export"), v1Ctrl.HandleItemsExport(), userMW...)
     a.server.Get(v1Base("/items/fields"), v1Ctrl.HandleGetAllCustomFieldNames(), userMW...)
     a.server.Get(v1Base("/items/fields/values"), v1Ctrl.HandleGetAllCustomFieldValues(), userMW...)
 

@@ -45,6 +45,30 @@ const docTemplate = `{
                }
            }
        },
+        "/v1/actions/ensure-import-refs": {
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Group"
+                ],
+                "summary": "Ensures all items in the database have an import ref",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/v1.ActionAmountResult"
+                        }
+                    }
+                }
+            }
+        },
        "/v1/actions/zero-item-time-fields": {
            "post": {
                "security": [
@@ -407,6 +431,27 @@
                }
            }
        },
+        "/v1/items/export": {
+            "get": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "tags": [
+                    "Items"
+                ],
+                "summary": "exports items into the database",
+                "responses": {
+                    "200": {
+                        "description": "text/csv",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
        "/v1/items/fields": {
            "get": {
                "security": [

@@ -37,6 +37,30 @@
                }
            }
        },
+        "/v1/actions/ensure-import-refs": {
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Group"
+                ],
+                "summary": "Ensures all items in the database have an import ref",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/v1.ActionAmountResult"
+                        }
+                    }
+                }
+            }
+        },
        "/v1/actions/zero-item-time-fields": {
            "post": {
                "security": [
@@ -399,6 +423,27 @@
                }
            }
        },
+        "/v1/items/export": {
+            "get": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "tags": [
+                    "Items"
+                ],
+                "summary": "exports items into the database",
+                "responses": {
+                    "200": {
+                        "description": "text/csv",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
        "/v1/items/fields": {
            "get": {
                "security": [

@@ -650,6 +650,20 @@ paths:
       summary: Ensures all items in the database have an asset id
       tags:
       - Group
+  /v1/actions/ensure-import-refs:
+    post:
+      produces:
+      - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/v1.ActionAmountResult'
+      security:
+      - Bearer: []
+      summary: Ensures all items in the database have an import ref
+      tags:
+      - Group
   /v1/actions/zero-item-time-fields:
     post:
       produces:
@@ -1109,6 +1123,18 @@ paths:
       summary: Update Maintenance Entry
       tags:
       - Maintenance
+  /v1/items/export:
+    get:
+      responses:
+        "200":
+          description: text/csv
+          schema:
+            type: string
+      security:
+      - Bearer: []
+      summary: exports items into the database
+      tags:
+      - Items
   /v1/items/fields:
     get:
       produces:

@@ -76,13 +76,11 @@ github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27k
 github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
 github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
 github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
-github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
 github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
 github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
 github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
 github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
 github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
-github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@@ -92,8 +90,6 @@ github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
 github.com/rs/zerolog v1.29.0 h1:Zes4hju04hjbvkVkOhdl2HpZa+0PmVwigmo8XoORE5w=
 github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0=
 github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
-github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
-github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
 github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=

@@ -1,16 +1,13 @@
 package services
 
 import (
-    "github.com/hay-kot/homebox/backend/internal/core/services/reporting"
     "github.com/hay-kot/homebox/backend/internal/data/repo"
-    "github.com/rs/zerolog/log"
 )
 
 type AllServices struct {
     User      *UserService
     Group     *GroupService
     Items     *ItemService
-    Reporting *reporting.ReportingService
 }
 
 type OptionsFunc func(*options)
@@ -45,7 +42,5 @@ func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
             repo:                 repos,
             autoIncrementAssetID: options.autoIncrementAssetID,
         },
-        // TODO: don't use global logger
-        Reporting: reporting.NewReportingService(repos, &log.Logger),
     }
 }

backend/internal/core/services/reporting/.testdata/import/fields.csv (new file, +5)
@@ -0,0 +1,5 @@
+HB.location,HB.name,HB.quantity,HB.description,HB.field.Custom Field 1,HB.field.Custom Field 2,HB.field.Custom Field 3
+loc,Item 1,1,Description 1,Value 1[1],Value 1[2],Value 1[3]
+loc,Item 2,2,Description 2,Value 2[1],Value 2[2],Value 2[3]
+loc,Item 3,3,Description 3,Value 3[1],Value 3[2],Value 3[3]
+

backend/internal/core/services/reporting/.testdata/import/minimal.csv (new file, +4)
@@ -0,0 +1,4 @@
+HB.location,HB.name,HB.quantity,HB.description
+loc,Item 1,1,Description 1
+loc,Item 2,2,Description 2
+loc,Item 3,3,Description 3

backend/internal/core/services/reporting/.testdata/import/types.csv (new file, +4)
@@ -0,0 +1,4 @@
+HB.name,HB.asset_id,HB.location,HB.labels
+Item 1,1,Path / To / Location 1,L1 ; L2 ; L3
+Item 2,000-002,Path /To/ Location 2,L1;L2;L3
+Item 3,1000-003,Path / To /Location 3 , L1;L2; L3

@@ -0,0 +1,42 @@
+package reporting
+
+import (
+    "github.com/gocarina/gocsv"
+    "github.com/hay-kot/homebox/backend/internal/data/repo"
+    "github.com/hay-kot/homebox/backend/internal/data/types"
+)
+
+// =================================================================================================
+
+type BillOfMaterialsEntry struct {
+    PurchaseDate types.Date `csv:"Purchase Date"`
+    Name         string     `csv:"Name"`
+    Description  string     `csv:"Description"`
+    Manufacturer string     `csv:"Manufacturer"`
+    SerialNumber string     `csv:"Serial Number"`
+    ModelNumber  string     `csv:"Model Number"`
+    Quantity     int        `csv:"Quantity"`
+    Price        float64    `csv:"Price"`
+    TotalPrice   float64    `csv:"Total Price"`
+}
+
+// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
+// See BillOfMaterialsEntry for the format of the output
+func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
+    bomEntries := make([]BillOfMaterialsEntry, len(entities))
+    for i, entity := range entities {
+        bomEntries[i] = BillOfMaterialsEntry{
+            PurchaseDate: entity.PurchaseTime,
+            Name:         entity.Name,
+            Description:  entity.Description,
+            Manufacturer: entity.Manufacturer,
+            SerialNumber: entity.SerialNumber,
+            ModelNumber:  entity.ModelNumber,
+            Quantity:     entity.Quantity,
+            Price:        entity.PurchasePrice,
+            TotalPrice:   entity.PurchasePrice * float64(entity.Quantity),
+        }
+    }
+
+    return gocsv.MarshalBytes(&bomEntries)
+}

backend/internal/core/services/reporting/import.go (new file, +93)
@@ -0,0 +1,93 @@
+package reporting
+
+import (
+    "bytes"
+    "encoding/csv"
+    "errors"
+    "io"
+    "strings"
+)
+
+var (
+    ErrNoHomeboxHeaders       = errors.New("no headers found")
+    ErrMissingRequiredHeaders = errors.New("missing required headers `HB.location` or `HB.name`")
+)
+
+// determineSeparator determines the separator used in the CSV file
+// It returns the separator as a rune and an error if it could not be determined
+//
+// It is assumed that the first row is the header row and that the separator is the same
+// for all rows.
+//
+// Supported separators are `,` and `\t`
+func determineSeparator(data []byte) (rune, error) {
+    // First row
+    firstRow := bytes.Split(data, []byte("\n"))[0]
+
+    // find first comma or /t
+    comma := bytes.IndexByte(firstRow, ',')
+    tab := bytes.IndexByte(firstRow, '\t')
+
+    switch {
+    case comma == -1 && tab == -1:
+        return 0, errors.New("could not determine separator")
+    case tab > comma:
+        return '\t', nil
+    default:
+        return ',', nil
+    }
+}
+
+// readRawCsv reads a CSV file and returns the raw data as a 2D string array
+// It determines the separator used in the CSV file and returns an error if
+// it could not be determined
+func readRawCsv(r io.Reader) ([][]string, error) {
+    data, err := io.ReadAll(r)
+    if err != nil {
+        return nil, err
+    }
+
+    reader := csv.NewReader(bytes.NewReader(data))
+
+    // Determine separator
+    sep, err := determineSeparator(data)
+    if err != nil {
+        return nil, err
+    }
+
+    reader.Comma = sep
+
+    return reader.ReadAll()
+}
+
+// parseHeaders parses the homebox headers from the CSV file and returns a map of the headers
+// and their column index as well as a list of the field headers (HB.field.*) in the order
+// they appear in the CSV file
+//
+// It returns an error if no homebox headers are found
+func parseHeaders(headers []string) (hbHeaders map[string]int, fieldHeaders []string, err error) {
+    hbHeaders = map[string]int{} // initialize map
+
+    for col, h := range headers {
+        if strings.HasPrefix(h, "HB.field.") {
+            fieldHeaders = append(fieldHeaders, h)
+        }
+
+        if strings.HasPrefix(h, "HB.") {
+            hbHeaders[h] = col
+        }
+    }
+
+    required := []string{"HB.location", "HB.name"}
+    for _, h := range required {
+        if _, ok := hbHeaders[h]; !ok {
+            return nil, nil, ErrMissingRequiredHeaders
+        }
+    }
+
+    if len(hbHeaders) == 0 {
+        return nil, nil, ErrNoHomeboxHeaders
+    }
+
+    return hbHeaders, fieldHeaders, nil
+}

backend/internal/core/services/reporting/io_row.go (new file, +85)
@@ -0,0 +1,85 @@
+package reporting
+
+import (
+    "strings"
+
+    "github.com/hay-kot/homebox/backend/internal/data/repo"
+    "github.com/hay-kot/homebox/backend/internal/data/types"
+)
+
+type ExportItemFields struct {
+    Name  string
+    Value string
+}
+
+type ExportTSVRow struct {
+    ImportRef string         `csv:"HB.import_ref"`
+    Location  LocationString `csv:"HB.location"`
+    LabelStr  LabelString    `csv:"HB.labels"`
+    AssetID   repo.AssetID   `csv:"HB.asset_id"`
+    Archived  bool           `csv:"HB.archived"`
+
+    Name        string `csv:"HB.name"`
+    Quantity    int    `csv:"HB.quantity"`
+    Description string `csv:"HB.description"`
+    Insured     bool   `csv:"HB.insured"`
+    Notes       string `csv:"HB.notes"`
+
+    PurchasePrice float64    `csv:"HB.purchase_price"`
+    PurchaseFrom  string     `csv:"HB.purchase_from"`
+    PurchaseTime  types.Date `csv:"HB.purchase_time"`
+
+    Manufacturer string `csv:"HB.manufacturer"`
+    ModelNumber  string `csv:"HB.model_number"`
+    SerialNumber string `csv:"HB.serial_number"`
+
+    LifetimeWarranty bool       `csv:"HB.lifetime_warranty"`
+    WarrantyExpires  types.Date `csv:"HB.warranty_expires"`
+    WarrantyDetails  string     `csv:"HB.warranty_details"`
+
+    SoldTo    string     `csv:"HB.sold_to"`
+    SoldPrice float64    `csv:"HB.sold_price"`
+    SoldTime  types.Date `csv:"HB.sold_time"`
+    SoldNotes string     `csv:"HB.sold_notes"`
+
+    Fields []ExportItemFields `csv:"-"`
+}
+
+// ============================================================================
+
+// LabelString is a string slice that is used to represent a list of labels.
+//
+// For example, a list of labels "Important; Work" would be represented as a
+// LabelString with the following values:
+//
+//	LabelString{"Important", "Work"}
+type LabelString []string
+
+func parseLabelString(s string) LabelString {
+    v, _ := parseSeparatedString(s, ";")
+    return v
+}
+
+func (ls LabelString) String() string {
+    return strings.Join(ls, "; ")
+}
+
+// ============================================================================
+
+// LocationString is a string slice that is used to represent a location
+// hierarchy.
+//
+// For example, a location hierarchy of "Home / Bedroom / Desk" would be
+// represented as a LocationString with the following values:
+//
+//	LocationString{"Home", "Bedroom", "Desk"}
+type LocationString []string
+
+func parseLocationString(s string) LocationString {
+    v, _ := parseSeparatedString(s, "/")
+    return v
+}
+
+func (csf LocationString) String() string {
+    return strings.Join(csf, " / ")
+}

backend/internal/core/services/reporting/io_sheet.go (new file, +310)
@@ -0,0 +1,310 @@
+package reporting
+
+import (
+    "fmt"
+    "io"
+    "reflect"
+    "sort"
+    "strconv"
+    "strings"
+
+    "github.com/hay-kot/homebox/backend/internal/data/repo"
+    "github.com/hay-kot/homebox/backend/internal/data/types"
+    "github.com/rs/zerolog/log"
+)
+
+// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
+// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
+// the standard format of the file.
+//
+// See ExportTSVRow for the format of the data in the sheet.
+type IOSheet struct {
+    headers []string
+    custom  []int
+    index   map[string]int
+    Rows    []ExportTSVRow
+}
+
+func (s *IOSheet) indexHeaders() {
+    s.index = make(map[string]int)
+
+    for i, h := range s.headers {
+        if strings.HasPrefix(h, "HB.field") {
+            s.custom = append(s.custom, i)
+        }
+
+        if strings.HasPrefix(h, "HB.") {
+            s.index[h] = i
+        }
+    }
+}
+
+func (s *IOSheet) GetColumn(str string) (col int, ok bool) {
+    if s.index == nil {
+        s.indexHeaders()
+    }
+
+    col, ok = s.index[str]
+    return
+}
+
+// Read reads a CSV/TSV and populates the "Rows" field with the data from the sheet
+// Custom Fields are supported via the `HB.field.*` headers. The `HB.field.*` the "Name"
+// of the field is the part after the `HB.field.` prefix. Additionally, Custom Fields with
+// no value are excluded from the row.Fields slice, this includes empty strings.
+//
+// Note That
+// - the first row is assumed to be the header
+// - at least 1 row of data is required
+// - rows and columns must be rectangular (i.e. all rows must have the same number of columns)
+func (s *IOSheet) Read(data io.Reader) error {
+    sheet, err := readRawCsv(data)
+    if err != nil {
+        return err
+    }
+
+    if len(sheet) < 2 {
+        return fmt.Errorf("sheet must have at least 1 row of data (header + 1)")
+    }
+
+    s.headers = sheet[0]
+    s.Rows = make([]ExportTSVRow, len(sheet)-1)
+
+    for i, row := range sheet[1:] {
+        if len(row) != len(s.headers) {
+            return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
+        }
+
+        rowData := ExportTSVRow{}
+
+        st := reflect.TypeOf(ExportTSVRow{})
+
+        for i := 0; i < st.NumField(); i++ {
+            field := st.Field(i)
+            tag := field.Tag.Get("csv")
+            if tag == "" || tag == "-" {
+                continue
+            }
+
+            col, ok := s.GetColumn(tag)
+            if !ok {
+                continue
+            }
+
+            val := row[col]
+
+            var v interface{}
+
+            switch field.Type {
+            case reflect.TypeOf(""):
+                v = val
+            case reflect.TypeOf(int(0)):
+                v = parseInt(val)
+            case reflect.TypeOf(bool(false)):
+                v = parseBool(val)
+            case reflect.TypeOf(float64(0)):
+                v = parseFloat(val)
+
+            // Custom Types
+            case reflect.TypeOf(types.Date{}):
+                v = types.DateFromString(val)
+            case reflect.TypeOf(repo.AssetID(0)):
+                v, _ = repo.ParseAssetID(val)
+            case reflect.TypeOf(LocationString{}):
+                v = parseLocationString(val)
+            case reflect.TypeOf(LabelString{}):
+                v = parseLabelString(val)
+            }
+
+            log.Debug().
+                Str("tag", tag).
+                Interface("val", v).
+                Str("type", fmt.Sprintf("%T", v)).
+                Msg("parsed value")
+
+            // Nil values are not allowed at the moment. This may change.
+            if v == nil {
+                return fmt.Errorf("could not convert %q to %s", val, field.Type)
+            }
+
+            ptrField := reflect.ValueOf(&rowData).Elem().Field(i)
+            ptrField.Set(reflect.ValueOf(v))
+        }
+
+        for _, col := range s.custom {
+            colName := strings.TrimPrefix(s.headers[col], "HB.field.")
+            customVal := row[col]
+            if customVal == "" {
+                continue
+            }
+
+            rowData.Fields = append(rowData.Fields, ExportItemFields{
+                Name:  colName,
+                Value: customVal,
+            })
+        }
+
+        s.Rows[i] = rowData
+    }
+
+    return nil
+}
+
+// Write writes the sheet to a writer.
+func (s *IOSheet) ReadItems(items []repo.ItemOut) {
+    s.Rows = make([]ExportTSVRow, len(items))
+
+    extraHeaders := map[string]struct{}{}
+
+    for i := range items {
+        item := items[i]
+
+        // TODO: Support fetching nested locations
+        locString := LocationString{item.Location.Name}
+
+        labelString := make([]string, len(item.Labels))
+
+        for i, l := range item.Labels {
+            labelString[i] = l.Name
+        }
+
+        customFields := make([]ExportItemFields, len(item.Fields))
+
+        for i, f := range item.Fields {
+            extraHeaders[f.Name] = struct{}{}
+
+            customFields[i] = ExportItemFields{
+                Name:  f.Name,
+                Value: f.TextValue,
+            }
+        }
+
+        s.Rows[i] = ExportTSVRow{
+            // fill struct
+            Location: locString,
+            LabelStr: labelString,
+
+            ImportRef:   item.ImportRef,
+            AssetID:     item.AssetID,
+            Name:        item.Name,
+            Quantity:    item.Quantity,
+            Description: item.Description,
+            Insured:     item.Insured,
+            Archived:    item.Archived,
+
+            PurchasePrice: item.PurchasePrice,
+            PurchaseFrom:  item.PurchaseFrom,
+            PurchaseTime:  item.PurchaseTime,
+
+            Manufacturer: item.Manufacturer,
+            ModelNumber:  item.ModelNumber,
+            SerialNumber: item.SerialNumber,
+
+            LifetimeWarranty: item.LifetimeWarranty,
+            WarrantyExpires:  item.WarrantyExpires,
+            WarrantyDetails:  item.WarrantyDetails,
+
+            SoldTo:    item.SoldTo,
+            SoldTime:  item.SoldTime,
+            SoldPrice: item.SoldPrice,
+            SoldNotes: item.SoldNotes,
+
+            Fields: customFields,
+        }
+    }
+
+    // Extract and sort additional headers for deterministic output
+    customHeaders := make([]string, 0, len(extraHeaders))
+
+    for k := range extraHeaders {
+        customHeaders = append(customHeaders, k)
+    }
+
+    sort.Strings(customHeaders)
+
+    st := reflect.TypeOf(ExportTSVRow{})
+
+    // Write headers
+    for i := 0; i < st.NumField(); i++ {
+        field := st.Field(i)
+        tag := field.Tag.Get("csv")
+        if tag == "" || tag == "-" {
+            continue
+        }
+
+        s.headers = append(s.headers, tag)
+    }
+
+    for _, h := range customHeaders {
+        s.headers = append(s.headers, "HB.field."+h)
+    }
+}
+
+// Writes the current sheet to a writer in TSV format.
+func (s *IOSheet) TSV() ([][]string, error) {
+    memcsv := make([][]string, len(s.Rows)+1)
+
+    memcsv[0] = s.headers
+
+    // use struct tags in rows to dertmine column order
+    for i, row := range s.Rows {
+        rowIdx := i + 1
+
+        memcsv[rowIdx] = make([]string, len(s.headers))
+
+        st := reflect.TypeOf(row)
+
+        for i := 0; i < st.NumField(); i++ {
+            field := st.Field(i)
+            tag := field.Tag.Get("csv")
+            if tag == "" || tag == "-" {
+                continue
+            }
+
+            col, ok := s.GetColumn(tag)
+            if !ok {
+                continue
+            }
+
+            val := reflect.ValueOf(row).Field(i)
+
+            var v string
+
+            switch field.Type {
+            case reflect.TypeOf(""):
+                v = val.String()
+            case reflect.TypeOf(int(0)):
+                v = strconv.Itoa(int(val.Int()))
+            case reflect.TypeOf(bool(false)):
+                v = strconv.FormatBool(val.Bool())
+            case reflect.TypeOf(float64(0)):
+                v = strconv.FormatFloat(val.Float(), 'f', -1, 64)
+
+            // Custom Types
+            case reflect.TypeOf(types.Date{}):
+                v = val.Interface().(types.Date).String()
+            case reflect.TypeOf(repo.AssetID(0)):
+                v = val.Interface().(repo.AssetID).String()
+            case reflect.TypeOf(LocationString{}):
+                v = val.Interface().(LocationString).String()
+            case reflect.TypeOf(LabelString{}):
+                v = val.Interface().(LabelString).String()
+            default:
+                log.Debug().Str("type", field.Type.String()).Msg("unknown type")
+            }
+
+            memcsv[rowIdx][col] = v
+        }
+
+        for _, f := range row.Fields {
+            col, ok := s.GetColumn("HB.field." + f.Name)
+            if !ok {
+                continue
+            }
+
+            memcsv[i+1][col] = f.Value
+        }
+    }
+
+    return memcsv, nil
+}

backend/internal/core/services/reporting/io_sheet_test.go (new file, +226)
@@ -0,0 +1,226 @@
+package reporting
+
+import (
+    "bytes"
+    "reflect"
+    "testing"
+
+    _ "embed"
+
+    "github.com/hay-kot/homebox/backend/internal/data/repo"
+    "github.com/stretchr/testify/assert"
+)
+
+var (
+    //go:embed .testdata/import/minimal.csv
+    minimalImportCSV []byte
+
+    //go:embed .testdata/import/fields.csv
+    customFieldImportCSV []byte
+
+    //go:embed .testdata/import/types.csv
+    customTypesImportCSV []byte
+
+    //go:embed .testdata/import.csv
+    CSVData_Comma []byte
+
+    //go:embed .testdata/import.tsv
+    CSVData_Tab []byte
+)
+
+func TestSheet_Read(t *testing.T) {
+    tests := []struct {
+        name    string
+        data    []byte
+        want    []ExportTSVRow
+        wantErr bool
+    }{
+        {
+            name: "minimal import",
+            data: minimalImportCSV,
+            want: []ExportTSVRow{
+                {Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
+                {Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
+                {Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
+            },
+        },
+        {
+            name: "custom field import",
+            data: customFieldImportCSV,
+            want: []ExportTSVRow{
+                {
+                    Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
+                    Fields: []ExportItemFields{
+                        {Name: "Custom Field 1", Value: "Value 1[1]"},
+                        {Name: "Custom Field 2", Value: "Value 1[2]"},
+                        {Name: "Custom Field 3", Value: "Value 1[3]"},
+                    },
+                },
+                {
+                    Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2",
+                    Fields: []ExportItemFields{
+                        {Name: "Custom Field 1", Value: "Value 2[1]"},
+                        {Name: "Custom Field 2", Value: "Value 2[2]"},
+                        {Name: "Custom Field 3", Value: "Value 2[3]"},
+                    },
+                },
+                {
+                    Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3",
+                    Fields: []ExportItemFields{
+                        {Name: "Custom Field 1", Value: "Value 3[1]"},
+                        {Name: "Custom Field 2", Value: "Value 3[2]"},
+                        {Name: "Custom Field 3", Value: "Value 3[3]"},
+                    },
+                },
+            },
+        },
+        {
+            name: "custom types import",
+            data: customTypesImportCSV,
+            want: []ExportTSVRow{
+                {
+                    Name:     "Item 1",
+                    AssetID:  repo.AssetID(1),
+                    Location: LocationString{"Path", "To", "Location 1"},
+                    LabelStr: LabelString{"L1", "L2", "L3"},
+                },
+                {
+                    Name:     "Item 2",
+                    AssetID:  repo.AssetID(2),
+                    Location: LocationString{"Path", "To", "Location 2"},
+                    LabelStr: LabelString{"L1", "L2", "L3"},
+                },
+                {
+                    Name:     "Item 3",
+                    AssetID:  repo.AssetID(1000003),
+                    Location: LocationString{"Path", "To", "Location 3"},
+                    LabelStr: LabelString{"L1", "L2", "L3"},
+                },
+            },
+        },
+    }
+
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            reader := bytes.NewReader(tt.data)
+
+            sheet := &IOSheet{}
+            err := sheet.Read(reader)
+
+            switch {
+            case tt.wantErr:
+                assert.Error(t, err)
+            default:
+                assert.NoError(t, err)
+                assert.ElementsMatch(t, tt.want, sheet.Rows)
+            }
+        })
+    }
+}
+
+func Test_parseHeaders(t *testing.T) {
+    tests := []struct {
+        name             string
+        rawHeaders       []string
+        wantHbHeaders    map[string]int
+        wantFieldHeaders []string
+        wantErr          bool
+    }{
+        {
+            name:             "no hombox headers",
+            rawHeaders:       []string{"Header 1", "Header 2", "Header 3"},
+            wantHbHeaders:    nil,
+            wantFieldHeaders: nil,
+            wantErr:          true,
+        },
+        {
+            name:       "field headers only",
+            rawHeaders: []string{"HB.location", "HB.name", "HB.field.1", "HB.field.2", "HB.field.3"},
+            wantHbHeaders: map[string]int{
+                "HB.location": 0,
+                "HB.name":     1,
+                "HB.field.1":  2,
+                "HB.field.2":  3,
+                "HB.field.3":  4,
+            },
+            wantFieldHeaders: []string{"HB.field.1", "HB.field.2", "HB.field.3"},
+            wantErr:          false,
+        },
+        {
+            name:       "mixed headers",
+            rawHeaders: []string{"Header 1", "HB.name", "Header 2", "HB.field.2", "Header 3", "HB.field.3", "HB.location"},
+            wantHbHeaders: map[string]int{
+                "HB.name":     1,
+                "HB.field.2":  3,
+                "HB.field.3":  5,
+                "HB.location": 6,
+            },
+            wantFieldHeaders: []string{"HB.field.2", "HB.field.3"},
+            wantErr:          false,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            gotHbHeaders, gotFieldHeaders, err := parseHeaders(tt.rawHeaders)
+            if (err != nil) != tt.wantErr {
+                t.Errorf("parseHeaders() error = %v, wantErr %v", err, tt.wantErr)
+                return
+            }
+            if !reflect.DeepEqual(gotHbHeaders, tt.wantHbHeaders) {
+                t.Errorf("parseHeaders() gotHbHeaders = %v, want %v", gotHbHeaders, tt.wantHbHeaders)
+            }
+            if !reflect.DeepEqual(gotFieldHeaders, tt.wantFieldHeaders) {
+                t.Errorf("parseHeaders() gotFieldHeaders = %v, want %v", gotFieldHeaders, tt.wantFieldHeaders)
+            }
+        })
+    }
+}
+
+func Test_determineSeparator(t *testing.T) {
+    type args struct {
+        data []byte
+    }
+    tests := []struct {
+        name    string
+        args    args
+        want    rune
+        wantErr bool
+    }{
+        {
+            name: "comma",
+            args: args{
+                data: CSVData_Comma,
+            },
+            want:    ',',
+            wantErr: false,
+        },
+        {
+            name: "tab",
+            args: args{
+                data: CSVData_Tab,
+            },
+            want:    '\t',
+            wantErr: false,
+        },
+        {
+            name: "invalid",
+            args: args{
+                data: []byte("a;b;c"),
+            },
+            want:    0,
+            wantErr: true,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            got, err := determineSeparator(tt.args.data)
+            if (err != nil) != tt.wantErr {
+                t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
+                return
+            }
+            if got != tt.want {
+                t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
+            }
+        })
+    }
+}

@@ -1,85 +0,0 @@
-package reporting
-
-import (
-    "context"
-    "encoding/csv"
-    "io"
-    "time"
-
-    "github.com/gocarina/gocsv"
-    "github.com/google/uuid"
-    "github.com/hay-kot/homebox/backend/internal/data/repo"
-    "github.com/rs/zerolog"
-)
-
-type ReportingService struct {
-    repos *repo.AllRepos
-    l     *zerolog.Logger
-}
-
-func NewReportingService(repos *repo.AllRepos, l *zerolog.Logger) *ReportingService {
-    gocsv.SetCSVWriter(func(out io.Writer) *gocsv.SafeCSVWriter {
-        writer := csv.NewWriter(out)
-        writer.Comma = '\t'
-        return gocsv.NewSafeCSVWriter(writer)
-    })
-
-    return &ReportingService{
-        repos: repos,
-        l:     l,
-    }
-}
-
-// =================================================================================================
-
-// NullableTime is a custom type that implements the MarshalCSV interface
-// to allow for nullable time.Time fields in the CSV output to be empty
-// and not "0001-01-01". It also overrides the default CSV output format
-type NullableTime time.Time
-
-func (t NullableTime) MarshalCSV() (string, error) {
-    if time.Time(t).IsZero() {
-        return "", nil
-    }
-    // YYYY-MM-DD
-    return time.Time(t).Format("2006-01-02"), nil
-}
-
-type BillOfMaterialsEntry struct {
-    PurchaseDate NullableTime `csv:"Purchase Date"`
-    Name         string       `csv:"Name"`
-    Description  string       `csv:"Description"`
-    Manufacturer string       `csv:"Manufacturer"`
-    SerialNumber string       `csv:"Serial Number"`
-    ModelNumber  string       `csv:"Model Number"`
-    Quantity     int          `csv:"Quantity"`
-    Price        float64      `csv:"Price"`
-    TotalPrice   float64      `csv:"Total Price"`
-}
-
-// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
-// See BillOfMaterialsEntry for the format of the output
-func (rs *ReportingService) BillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
-    entities, err := rs.repos.Items.GetAll(ctx, GID)
-    if err != nil {
-        rs.l.Debug().Err(err).Msg("failed to get all items for BOM Csv Reporting")
-        return nil, err
-    }
-
-    bomEntries := make([]BillOfMaterialsEntry, len(entities))
-    for i, entity := range entities {
-        bomEntries[i] = BillOfMaterialsEntry{
-            PurchaseDate: NullableTime(entity.PurchaseTime),
-            Name:         entity.Name,
-            Description:  entity.Description,
-            Manufacturer: entity.Manufacturer,
-            SerialNumber: entity.SerialNumber,
-            ModelNumber:  entity.ModelNumber,
-            Quantity:     entity.Quantity,
-            Price:        entity.PurchasePrice,
-            TotalPrice:   entity.PurchasePrice * float64(entity.Quantity),
-        }
-    }
-
-    return gocsv.MarshalBytes(&bomEntries)
-}

backend/internal/core/services/reporting/value_parsers.go (new file, +38)
@@ -0,0 +1,38 @@
+package reporting
+
+import (
+    "strconv"
+    "strings"
+)
+
+func parseSeparatedString(s string, sep string) ([]string, error) {
+    list := strings.Split(s, sep)
+
+    csf := make([]string, 0, len(list))
+    for _, s := range list {
+        trimmed := strings.TrimSpace(s)
+        if trimmed != "" {
+            csf = append(csf, trimmed)
+        }
+    }
+
+    return csf, nil
+}
+
+func parseFloat(s string) float64 {
+    if s == "" {
+        return 0
+    }
+    f, _ := strconv.ParseFloat(s, 64)
+    return f
+}
+
+func parseBool(s string) bool {
+    b, _ := strconv.ParseBool(s)
+    return b
+}
+
+func parseInt(s string) int {
+    i, _ := strconv.Atoi(s)
+    return i
+}

@@ -0,0 +1,65 @@
+package reporting
+
+import (
+    "reflect"
+    "testing"
+)
+
+func Test_parseSeparatedString(t *testing.T) {
+    type args struct {
+        s   string
+        sep string
+    }
+    tests := []struct {
+        name    string
+        args    args
+        want    []string
+        wantErr bool
+    }{
+        {
+            name: "comma",
+            args: args{
+                s:   "a,b,c",
+                sep: ",",
+            },
+            want:    []string{"a", "b", "c"},
+            wantErr: false,
+        },
+        {
+            name: "trimmed comma",
+            args: args{
+                s:   "a, b, c",
+                sep: ",",
+            },
+            want: []string{"a", "b", "c"},
+        },
+        {
+            name: "excessive whitespace",
+            args: args{
+                s:   " a, b, c ",
+                sep: ",",
+            },
+            want: []string{"a", "b", "c"},
+        },
+        {
+            name: "empty",
+            args: args{
+                s:   "",
+                sep: ",",
+            },
+            want: []string{},
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            got, err := parseSeparatedString(tt.args.s, tt.args.sep)
+            if (err != nil) != tt.wantErr {
+                t.Errorf("parseSeparatedString() error = %v, wantErr %v", err, tt.wantErr)
+                return
+            }
+            if !reflect.DeepEqual(got, tt.want) {
+                t.Errorf("parseSeparatedString() = %v, want %v", got, tt.want)
+            }
+        })
+    }
+}

@@ -3,10 +3,13 @@ package services
 import (
 	"context"
 	"errors"
+	"fmt"
+	"io"
+	"strings"
 
 	"github.com/google/uuid"
+	"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
 	"github.com/hay-kot/homebox/backend/internal/data/repo"
-	"github.com/rs/zerolog/log"
 )
 
 var (
@@ -37,7 +40,6 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
 
 func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) {
 	items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID)
-
 	if err != nil {
 		return 0, err
 	}
@@ -61,190 +63,290 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
 
 	return finished, nil
 }
 
-func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data [][]string) (int, error) {
-	loaded := []csvRow{}
-
-	// Skip first row
-	for _, row := range data[1:] {
-		// Skip empty rows
-		if len(row) == 0 {
-			continue
-		}
-
-		if len(row) != NumOfCols {
-			return 0, ErrInvalidCsv
-		}
-
-		r := newCsvRow(row)
-		loaded = append(loaded, r)
-	}
-
-	// validate rows
-	var errMap = map[int][]error{}
-	var hasErr bool
-	for i, r := range loaded {
-		errs := r.validate()
-
-		if len(errs) > 0 {
-			hasErr = true
-			lineNum := i + 2
-
-			errMap[lineNum] = errs
-		}
-	}
-
-	if hasErr {
-		for lineNum, errs := range errMap {
-			for _, err := range errs {
-				log.Error().Err(err).Int("line", lineNum).Msg("csv import error")
-			}
-		}
-	}
-
-	// Bootstrap the locations and labels so we can reuse the created IDs for the items
-	locations := map[string]uuid.UUID{}
-	existingLocation, err := svc.repo.Locations.GetAll(ctx, GID, repo.LocationQuery{})
-	if err != nil {
-		return 0, err
-	}
-	for _, loc := range existingLocation {
-		locations[loc.Name] = loc.ID
-	}
-
-	labels := map[string]uuid.UUID{}
-	existingLabels, err := svc.repo.Labels.GetAll(ctx, GID)
-	if err != nil {
-		return 0, err
-	}
-	for _, label := range existingLabels {
-		labels[label.Name] = label.ID
-	}
-
-	for _, row := range loaded {
-		// Locations
-		if _, exists := locations[row.Location]; !exists {
-			result, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
-				Name:        row.Location,
-				Description: "",
-			})
-			if err != nil {
-				return 0, err
-			}
-			locations[row.Location] = result.ID
-		}
-
-		// Labels
-		for _, label := range row.getLabels() {
-			if _, exists := labels[label]; exists {
-				continue
-			}
-			result, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{
-				Name:        label,
-				Description: "",
-			})
-			if err != nil {
-				return 0, err
-			}
-			labels[label] = result.ID
-		}
-	}
-
-	highest := repo.AssetID(-1)
-	if svc.autoIncrementAssetID {
-		highest, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
-		if err != nil {
-			return 0, err
-		}
-	}
-
-	// Create the items
-	var count int
-	for _, row := range loaded {
-		// Check Import Ref
-		if row.Item.ImportRef != "" {
-			exists, err := svc.repo.Items.CheckRef(ctx, GID, row.Item.ImportRef)
-			if exists {
-				continue
-			}
-			if err != nil {
-				log.Err(err).Msg("error checking import ref")
-			}
-		}
-
-		locationID := locations[row.Location]
-		labelIDs := []uuid.UUID{}
-		for _, label := range row.getLabels() {
-			labelIDs = append(labelIDs, labels[label])
-		}
-
-		log.Info().
-			Str("name", row.Item.Name).
-			Str("location", row.Location).
-			Msgf("Creating Item: %s", row.Item.Name)
-
-		data := repo.ItemCreate{
-			ImportRef:   row.Item.ImportRef,
-			Name:        row.Item.Name,
-			Description: row.Item.Description,
-			LabelIDs:    labelIDs,
-			LocationID:  locationID,
-		}
-
-		if svc.autoIncrementAssetID {
-			highest++
-			data.AssetID = highest
-		}
-
-		result, err := svc.repo.Items.Create(ctx, GID, data)
-		if err != nil {
-			return count, err
-		}
-
-		// Update the item with the rest of the data
-		_, err = svc.repo.Items.UpdateByGroup(ctx, GID, repo.ItemUpdate{
-			// Edges
-			LocationID: locationID,
-			LabelIDs:   labelIDs,
-			AssetID:    data.AssetID,
-
-			// General Fields
-			ID:          result.ID,
-			Name:        result.Name,
-			Description: result.Description,
-			Insured:     row.Item.Insured,
-			Notes:       row.Item.Notes,
-			Quantity:    row.Item.Quantity,
-
-			// Identifies the item as imported
-			SerialNumber: row.Item.SerialNumber,
-			ModelNumber:  row.Item.ModelNumber,
-			Manufacturer: row.Item.Manufacturer,
-
-			// Purchase
-			PurchaseFrom:  row.Item.PurchaseFrom,
-			PurchasePrice: row.Item.PurchasePrice,
-			PurchaseTime:  row.Item.PurchaseTime,
-
-			// Warranty
-			LifetimeWarranty: row.Item.LifetimeWarranty,
-			WarrantyExpires:  row.Item.WarrantyExpires,
-			WarrantyDetails:  row.Item.WarrantyDetails,
-
-			SoldTo:    row.Item.SoldTo,
-			SoldPrice: row.Item.SoldPrice,
-			SoldTime:  row.Item.SoldTime,
-			SoldNotes: row.Item.SoldNotes,
-		})
-		if err != nil {
-			return count, err
-		}
-
-		count++
-	}
-	return count, nil
+func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) {
+	ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID)
+	if err != nil {
+		return 0, err
+	}
+
+	finished := 0
+	for _, itemID := range ids {
+		ref := uuid.New().String()[0:8]
+
+		err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref})
+		if err != nil {
+			return 0, err
+		}
+		finished++
+	}
+
+	return finished, nil
+}
+
+func serializeLocation[T ~[]string](location T) string {
+	return strings.Join(location, "/")
+}
+
+// CsvImport imports items from a CSV file using the standard defined format.
+//
+// CsvImport applies the following rules/operations:
+//
+//  1. If the item does not exist, it is created.
+//  2. If the item has an ImportRef that already exists, the existing item is updated.
+//  3. Locations and Labels are created if they do not exist.
+func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) {
+	sheet := reporting.IOSheet{}
+
+	err := sheet.Read(data)
+	if err != nil {
+		return 0, err
+	}
+
+	// ========================================
+	// Labels
+
+	labelMap := make(map[string]uuid.UUID)
+	{
+		labels, err := svc.repo.Labels.GetAll(ctx, GID)
+		if err != nil {
+			return 0, err
+		}
+
+		for _, label := range labels {
+			labelMap[label.Name] = label.ID
+		}
+	}
+
+	// ========================================
+	// Locations
+
+	locationMap := make(map[string]uuid.UUID)
+	{
+		locations, err := svc.repo.Locations.Tree(ctx, GID, repo.TreeQuery{WithItems: false})
+		if err != nil {
+			return 0, err
+		}
+
+		// Traverse the tree and build a map of location full paths to IDs
+		// where the full path is the location name joined by slashes.
+		var traverse func(location *repo.TreeItem, path []string)
+		traverse = func(location *repo.TreeItem, path []string) {
+			path = append(path, location.Name)
+
+			locationMap[serializeLocation(path)] = location.ID
+
+			for _, child := range location.Children {
+				traverse(child, path)
+			}
+		}
+
+		for _, location := range locations {
+			traverse(&location, []string{})
+		}
+	}
+
+	// ========================================
+	// Import items
+
+	// Asset ID Pre-Check
+	highestAID := repo.AssetID(-1)
+	if svc.autoIncrementAssetID {
+		highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
+		if err != nil {
+			return 0, err
+		}
+	}
+
+	finished := 0
+	for i := range sheet.Rows {
+		row := sheet.Rows[i]
+
+		createRequired := true
+
+		// ========================================
+		// Preflight check for existing item
+		if row.ImportRef != "" {
+			exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef)
+			if err != nil {
+				return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err)
+			}
+
+			if exists {
+				createRequired = false
+			}
+		}
+
+		// ========================================
+		// Pre-Create Labels as necessary
+		labelIds := make([]uuid.UUID, len(row.LabelStr))
+
+		for j := range row.LabelStr {
+			label := row.LabelStr[j]
+
+			id, ok := labelMap[label]
+			if !ok {
+				newLabel, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{Name: label})
+				if err != nil {
+					return 0, err
+				}
+				id = newLabel.ID
+			}
+
+			labelIds[j] = id
+			labelMap[label] = id
+		}
+
+		// ========================================
+		// Pre-Create Locations as necessary
+		path := serializeLocation(row.Location)
+
+		locationID, ok := locationMap[path]
+		if !ok { // Traverse the path of LocationStr and check each path element to see if it exists already, if not create it.
+			paths := []string{}
+			for i, pathElement := range row.Location {
+				paths = append(paths, pathElement)
+				path := serializeLocation(paths)
+
+				locationID, ok = locationMap[path]
+				if !ok {
+					parentID := uuid.Nil
+
+					// Get the parent ID
+					if i > 0 {
+						parentPath := serializeLocation(row.Location[:i])
+						parentID = locationMap[parentPath]
+					}
+
+					newLocation, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
+						ParentID: parentID,
+						Name:     pathElement,
+					})
+					if err != nil {
+						return 0, err
+					}
+					locationID = newLocation.ID
+				}
+
+				locationMap[path] = locationID
+			}
+
+			locationID, ok = locationMap[path]
+			if !ok {
+				return 0, errors.New("failed to create location")
+			}
+		}
+
+		var effAID repo.AssetID
+		if svc.autoIncrementAssetID && row.AssetID.Nil() {
+			effAID = highestAID + 1
+			highestAID++
+		} else {
+			effAID = row.AssetID
+		}
+
+		// ========================================
+		// Create Item
+		var item repo.ItemOut
+		switch {
+		case createRequired:
+			newItem := repo.ItemCreate{
+				ImportRef:   row.ImportRef,
+				Name:        row.Name,
+				Description: row.Description,
+				AssetID:     effAID,
+				LocationID:  locationID,
+				LabelIDs:    labelIds,
+			}
+
+			item, err = svc.repo.Items.Create(ctx, GID, newItem)
+			if err != nil {
+				return 0, err
+			}
+		default:
+			item, err = svc.repo.Items.GetByRef(ctx, GID, row.ImportRef)
+			if err != nil {
+				return 0, err
+			}
+		}
+
+		if item.ID == uuid.Nil {
+			panic("item ID is nil on import - this should never happen")
+		}
+
+		fields := make([]repo.ItemField, len(row.Fields))
+		for i := range row.Fields {
+			fields[i] = repo.ItemField{
+				Name:      row.Fields[i].Name,
+				Type:      "text",
+				TextValue: row.Fields[i].Value,
+			}
+		}
+
+		updateItem := repo.ItemUpdate{
+			ID:         item.ID,
+			LabelIDs:   labelIds,
+			LocationID: locationID,
+
+			Name:        row.Name,
+			Description: row.Description,
+			AssetID:     effAID,
+			Insured:     row.Insured,
+			Quantity:    row.Quantity,
+			Archived:    row.Archived,
+
+			PurchasePrice: row.PurchasePrice,
+			PurchaseFrom:  row.PurchaseFrom,
+			PurchaseTime:  row.PurchaseTime,
+
+			Manufacturer: row.Manufacturer,
+			ModelNumber:  row.ModelNumber,
+			SerialNumber: row.SerialNumber,
+
+			LifetimeWarranty: row.LifetimeWarranty,
+			WarrantyExpires:  row.WarrantyExpires,
+			WarrantyDetails:  row.WarrantyDetails,
+
+			SoldTo:    row.SoldTo,
+			SoldTime:  row.SoldTime,
+			SoldPrice: row.SoldPrice,
+			SoldNotes: row.SoldNotes,
+
+			Notes:  row.Notes,
+			Fields: fields,
+		}
+
+		item, err = svc.repo.Items.UpdateByGroup(ctx, GID, updateItem)
+		if err != nil {
+			return 0, err
+		}
+
+		finished++
+	}
+
+	return finished, nil
+}
+
+func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
+	items, err := svc.repo.Items.GetAll(ctx, GID)
+	if err != nil {
+		return nil, err
+	}
+
+	sheet := reporting.IOSheet{}
+
+	sheet.ReadItems(items)
+
+	return sheet.TSV()
+}
+
+func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
+	items, err := svc.repo.Items.GetAll(ctx, GID)
+	if err != nil {
+		return nil, err
+	}
+
+	return reporting.BillOfMaterialsTSV(items)
 }
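Since CsvImport now takes an io.Reader rather than pre-parsed rows, callers can stream a file or feed a literal string directly. A minimal sketch of driving it (the service value, context, and group ID are assumed to come from the surrounding setup; the column headers follow the documented HB.* format):

```go
package demo

import (
	"context"
	"strings"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/core/services"
)

// importSample feeds an inline CSV body to the reader-based import and
// returns the number of rows that were created or updated.
func importSample(ctx context.Context, svc *services.ItemService, gid uuid.UUID) (int, error) {
	data := "HB.location,HB.labels,HB.name,HB.quantity\n" +
		"Garage,Tools,Impact Driver,1\n"
	return svc.CsvImport(ctx, gid, strings.NewReader(data))
}
```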
@@ -1,151 +0,0 @@
package services

import (
	"bytes"
	"encoding/csv"
	"errors"
	"io"
	"strconv"
	"strings"

	"github.com/hay-kot/homebox/backend/internal/data/repo"
	"github.com/hay-kot/homebox/backend/internal/data/types"
)

func determineSeparator(data []byte) (rune, error) {
	// First row
	firstRow := bytes.Split(data, []byte("\n"))[0]

	// find first comma or /t
	comma := bytes.IndexByte(firstRow, ',')
	tab := bytes.IndexByte(firstRow, '\t')

	switch {
	case comma == -1 && tab == -1:
		return 0, errors.New("could not determine separator")
	case tab > comma:
		return '\t', nil
	default:
		return ',', nil
	}
}

func ReadCsv(r io.Reader) ([][]string, error) {
	data, err := io.ReadAll(r)
	if err != nil {
		return nil, err
	}

	reader := csv.NewReader(bytes.NewReader(data))

	// Determine separator
	sep, err := determineSeparator(data)
	if err != nil {
		return nil, err
	}

	reader.Comma = sep

	return reader.ReadAll()
}

var ErrInvalidCsv = errors.New("invalid csv")

const NumOfCols = 21

func parseFloat(s string) float64 {
	if s == "" {
		return 0
	}
	f, _ := strconv.ParseFloat(s, 64)
	return f
}

func parseBool(s string) bool {
	switch strings.ToLower(s) {
	case "true", "yes", "1":
		return true
	default:
		return false
	}
}

func parseInt(s string) int {
	i, _ := strconv.Atoi(s)
	return i
}

type csvRow struct {
	Item     repo.ItemOut
	Location string
	LabelStr string
}

func newCsvRow(row []string) csvRow {
	return csvRow{
		Location: row[1],
		LabelStr: row[2],
		Item: repo.ItemOut{
			ItemSummary: repo.ItemSummary{
				ImportRef:     row[0],
				Quantity:      parseInt(row[3]),
				Name:          row[4],
				Description:   row[5],
				Insured:       parseBool(row[6]),
				PurchasePrice: parseFloat(row[12]),
			},
			SerialNumber:     row[7],
			ModelNumber:      row[8],
			Manufacturer:     row[9],
			Notes:            row[10],
			PurchaseFrom:     row[11],
			PurchaseTime:     types.DateFromString(row[13]),
			LifetimeWarranty: parseBool(row[14]),
			WarrantyExpires:  types.DateFromString(row[15]),
			WarrantyDetails:  row[16],
			SoldTo:           row[17],
			SoldPrice:        parseFloat(row[18]),
			SoldTime:         types.DateFromString(row[19]),
			SoldNotes:        row[20],
		},
	}
}

func (c csvRow) getLabels() []string {
	split := strings.Split(c.LabelStr, ";")

	// Trim each
	for i, s := range split {
		split[i] = strings.TrimSpace(s)
	}

	// Remove empty
	for i, s := range split {
		if s == "" {
			split = append(split[:i], split[i+1:]...)
		}
	}

	return split
}

func (c csvRow) validate() []error {
	var errs []error

	add := func(err error) {
		errs = append(errs, err)
	}

	required := func(s string, name string) {
		if s == "" {
			add(errors.New(name + " is required"))
		}
	}

	required(c.Location, "Location")
	required(c.Item.Name, "Name")

	return errs
}
@@ -1,164 +0,0 @@
package services

import (
	"bytes"
	_ "embed"
	"encoding/csv"
	"fmt"
	"reflect"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

//go:embed .testdata/import.csv
var CSVData_Comma []byte

//go:embed .testdata/import.tsv
var CSVData_Tab []byte

func loadcsv() [][]string {
	reader := csv.NewReader(bytes.NewReader(CSVData_Comma))

	records, err := reader.ReadAll()
	if err != nil {
		panic(err)
	}

	return records
}

func Test_CorrectDateParsing(t *testing.T) {
	t.Parallel()

	expected := []time.Time{
		time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
		time.Date(2021, 10, 15, 0, 0, 0, 0, time.UTC),
		time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
		time.Date(2020, 10, 21, 0, 0, 0, 0, time.UTC),
		time.Date(2020, 10, 14, 0, 0, 0, 0, time.UTC),
		time.Date(2020, 9, 30, 0, 0, 0, 0, time.UTC),
	}

	records := loadcsv()

	for i, record := range records {
		if i == 0 {
			continue
		}
		entity := newCsvRow(record)
		expected := expected[i-1]

		assert.Equal(t, expected, entity.Item.PurchaseTime.Time(), fmt.Sprintf("Failed on row %d", i))
		assert.Equal(t, expected, entity.Item.WarrantyExpires.Time(), fmt.Sprintf("Failed on row %d", i))
		assert.Equal(t, expected, entity.Item.SoldTime.Time(), fmt.Sprintf("Failed on row %d", i))
	}
}

func Test_csvRow_getLabels(t *testing.T) {
	type fields struct {
		LabelStr string
	}
	tests := []struct {
		name   string
		fields fields
		want   []string
	}{
		{
			name:   "basic test",
			fields: fields{LabelStr: "IOT;Home Assistant;Z-Wave"},
			want:   []string{"IOT", "Home Assistant", "Z-Wave"},
		},
		{
			name:   "no labels",
			fields: fields{LabelStr: ""},
			want:   []string{},
		},
		{
			name:   "single label",
			fields: fields{LabelStr: "IOT"},
			want:   []string{"IOT"},
		},
		{
			name:   "trailing semicolon",
			fields: fields{LabelStr: "IOT;"},
			want:   []string{"IOT"},
		},
		{
			name:   "whitespace",
			fields: fields{LabelStr: " IOT; Home Assistant; Z-Wave "},
			want:   []string{"IOT", "Home Assistant", "Z-Wave"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			c := csvRow{
				LabelStr: tt.fields.LabelStr,
			}
			if got := c.getLabels(); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("csvRow.getLabels() = %v, want %v", got, tt.want)
			}
		})
	}
}

func Test_determineSeparator(t *testing.T) {
	type args struct {
		data []byte
	}
	tests := []struct {
		name    string
		args    args
		want    rune
		wantErr bool
	}{
		{
			name:    "comma",
			args:    args{data: CSVData_Comma},
			want:    ',',
			wantErr: false,
		},
		{
			name:    "tab",
			args:    args{data: CSVData_Tab},
			want:    '\t',
			wantErr: false,
		},
		{
			name:    "invalid",
			args:    args{data: []byte("a;b;c")},
			want:    0,
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := determineSeparator(tt.args.data)
			if (err != nil) != tt.wantErr {
				t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
			}
		})
	}
}
@@ -1,78 +0,0 @@
package services

import (
	"context"
	"testing"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
	"github.com/stretchr/testify/assert"
)

func TestItemService_CsvImport(t *testing.T) {
	data := loadcsv()
	svc := &ItemService{
		repo: tRepos,
	}
	count, err := svc.CsvImport(context.Background(), tGroup.ID, data)
	assert.Equal(t, 6, count)
	assert.NoError(t, err)

	// Check import refs are deduplicated
	count, err = svc.CsvImport(context.Background(), tGroup.ID, data)
	assert.Equal(t, 0, count)
	assert.NoError(t, err)

	items, err := svc.repo.Items.GetAll(context.Background(), tGroup.ID)
	assert.NoError(t, err)
	t.Cleanup(func() {
		for _, item := range items {
			err := svc.repo.Items.Delete(context.Background(), item.ID)
			assert.NoError(t, err)
		}
	})

	assert.Equal(t, len(items), 6)

	dataCsv := []csvRow{}
	for _, item := range data {
		dataCsv = append(dataCsv, newCsvRow(item))
	}

	allLocation, err := tRepos.Locations.GetAll(context.Background(), tGroup.ID, repo.LocationQuery{})
	assert.NoError(t, err)
	locNames := []string{}
	for _, loc := range allLocation {
		locNames = append(locNames, loc.Name)
	}

	allLabels, err := tRepos.Labels.GetAll(context.Background(), tGroup.ID)
	assert.NoError(t, err)
	labelNames := []string{}
	for _, label := range allLabels {
		labelNames = append(labelNames, label.Name)
	}

	ids := []uuid.UUID{}
	t.Cleanup((func() {
		for _, id := range ids {
			err := svc.repo.Items.Delete(context.Background(), id)
			assert.NoError(t, err)
		}
	}))

	for _, item := range items {
		assert.Contains(t, locNames, item.Location.Name)
		for _, label := range item.Labels {
			assert.Contains(t, labelNames, label.Name)
		}

		for _, csvRow := range dataCsv {
			if csvRow.Item.Name == item.Name {
				assert.Equal(t, csvRow.Item.Description, item.Description)
				assert.Equal(t, csvRow.Item.Quantity, item.Quantity)
				assert.Equal(t, csvRow.Item.Insured, item.Insured)
			}
		}
	}
}
@@ -67,6 +67,26 @@ func (iu *ItemUpdate) ClearDescription() *ItemUpdate {
 	return iu
 }
 
+// SetImportRef sets the "import_ref" field.
+func (iu *ItemUpdate) SetImportRef(s string) *ItemUpdate {
+	iu.mutation.SetImportRef(s)
+	return iu
+}
+
+// SetNillableImportRef sets the "import_ref" field if the given value is not nil.
+func (iu *ItemUpdate) SetNillableImportRef(s *string) *ItemUpdate {
+	if s != nil {
+		iu.SetImportRef(*s)
+	}
+	return iu
+}
+
+// ClearImportRef clears the value of the "import_ref" field.
+func (iu *ItemUpdate) ClearImportRef() *ItemUpdate {
+	iu.mutation.ClearImportRef()
+	return iu
+}
+
 // SetNotes sets the "notes" field.
 func (iu *ItemUpdate) SetNotes(s string) *ItemUpdate {
 	iu.mutation.SetNotes(s)
@@ -713,6 +733,11 @@ func (iu *ItemUpdate) check() error {
 			return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)}
 		}
 	}
+	if v, ok := iu.mutation.ImportRef(); ok {
+		if err := item.ImportRefValidator(v); err != nil {
+			return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)}
+		}
+	}
 	if v, ok := iu.mutation.Notes(); ok {
 		if err := item.NotesValidator(v); err != nil {
 			return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
@@ -773,6 +798,9 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
 	if iu.mutation.DescriptionCleared() {
 		_spec.ClearField(item.FieldDescription, field.TypeString)
 	}
+	if value, ok := iu.mutation.ImportRef(); ok {
+		_spec.SetField(item.FieldImportRef, field.TypeString, value)
+	}
 	if iu.mutation.ImportRefCleared() {
 		_spec.ClearField(item.FieldImportRef, field.TypeString)
 	}
@@ -1302,6 +1330,26 @@ func (iuo *ItemUpdateOne) ClearDescription() *ItemUpdateOne {
 	return iuo
 }
 
+// SetImportRef sets the "import_ref" field.
+func (iuo *ItemUpdateOne) SetImportRef(s string) *ItemUpdateOne {
+	iuo.mutation.SetImportRef(s)
+	return iuo
+}
+
+// SetNillableImportRef sets the "import_ref" field if the given value is not nil.
+func (iuo *ItemUpdateOne) SetNillableImportRef(s *string) *ItemUpdateOne {
+	if s != nil {
+		iuo.SetImportRef(*s)
+	}
+	return iuo
+}
+
+// ClearImportRef clears the value of the "import_ref" field.
+func (iuo *ItemUpdateOne) ClearImportRef() *ItemUpdateOne {
+	iuo.mutation.ClearImportRef()
+	return iuo
+}
+
 // SetNotes sets the "notes" field.
 func (iuo *ItemUpdateOne) SetNotes(s string) *ItemUpdateOne {
 	iuo.mutation.SetNotes(s)
@@ -1961,6 +2009,11 @@ func (iuo *ItemUpdateOne) check() error {
 			return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)}
 		}
 	}
+	if v, ok := iuo.mutation.ImportRef(); ok {
+		if err := item.ImportRefValidator(v); err != nil {
+			return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)}
+		}
+	}
 	if v, ok := iuo.mutation.Notes(); ok {
 		if err := item.NotesValidator(v); err != nil {
 			return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
@@ -2038,6 +2091,9 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
 	if iuo.mutation.DescriptionCleared() {
 		_spec.ClearField(item.FieldDescription, field.TypeString)
 	}
+	if value, ok := iuo.mutation.ImportRef(); ok {
+		_spec.SetField(item.FieldImportRef, field.TypeString, value)
+	}
 	if iuo.mutation.ImportRefCleared() {
 		_spec.ClearField(item.FieldImportRef, field.TypeString)
 	}
@@ -38,8 +38,7 @@ func (Item) Fields() []ent.Field {
 	return []ent.Field{
 		field.String("import_ref").
 			Optional().
-			MaxLen(100).
-			Immutable(),
+			MaxLen(100),
 		field.String("notes").
 			MaxLen(1000).
 			Optional(),
@@ -32,10 +32,18 @@ func ParseAssetID(s string) (AID AssetID, ok bool) {
 	return ParseAssetIDBytes([]byte(s))
 }
 
-func (aid AssetID) MarshalJSON() ([]byte, error) {
+func (aid AssetID) String() string {
+	if aid.Nil() {
+		return ""
+	}
+
 	aidStr := fmt.Sprintf("%06d", aid)
 	aidStr = fmt.Sprintf("%s-%s", aidStr[:3], aidStr[3:])
-	return []byte(fmt.Sprintf(`"%s"`, aidStr)), nil
+	return aidStr
+}
+
+func (aid AssetID) MarshalJSON() ([]byte, error) {
+	return []byte(`"` + aid.String() + `"`), nil
 }
 
 func (aid *AssetID) UnmarshalJSON(d []byte) error {
@@ -50,3 +58,11 @@ func (aid *AssetID) UnmarshalJSON(d []byte) error {
 	*aid = AssetID(aidInt)
 	return nil
 }
+
+func (aid AssetID) MarshalCSV() (string, error) {
+	return aid.String(), nil
+}
+
+func (aid *AssetID) UnmarshalCSV(d string) error {
+	return aid.UnmarshalJSON([]byte(d))
+}
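A small illustration (hypothetical values) of the new formatting split: String carries the zero-is-empty rule, and the JSON and CSV marshalers now share it:

```go
package main

import (
	"fmt"

	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

func main() {
	fmt.Println(repo.AssetID(0).String())   // "" - nil asset IDs now serialize empty
	fmt.Println(repo.AssetID(123).String()) // "000-123"

	b, _ := repo.AssetID(123).MarshalJSON()
	fmt.Println(string(b)) // "000-123", quoted
}
```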
@@ -21,7 +21,7 @@ func TestAssetID_MarshalJSON(t *testing.T) {
 		{
 			name: "zero test",
 			aid:  0,
-			want: []byte(`"000-000"`),
+			want: []byte(`""`),
 		},
 		{
 			name: "large int",
@@ -59,6 +59,7 @@ type (
 		LocationID uuid.UUID   `json:"locationId"`
 		LabelIDs   []uuid.UUID `json:"labelIds"`
 	}
 
 	ItemUpdate struct {
 		ParentID uuid.UUID `json:"parentId" extensions:"x-nullable,x-omitempty"`
 		ID       uuid.UUID `json:"id"`
@@ -99,6 +100,12 @@ type (
 		Fields []ItemField `json:"fields"`
 	}
 
+	ItemPatch struct {
+		ID        uuid.UUID `json:"id"`
+		Quantity  *int      `json:"quantity,omitempty" extensions:"x-nullable,x-omitempty"`
+		ImportRef *string   `json:"importRef,omitempty" extensions:"x-nullable,x-omitempty"`
+	}
+
 	ItemSummary struct {
 		ImportRef string    `json:"-"`
 		ID        uuid.UUID `json:"id"`
@@ -168,6 +175,7 @@ func mapItemSummary(item *ent.Item) ItemSummary {
 		ID:          item.ID,
 		Name:        item.Name,
 		Description: item.Description,
+		ImportRef:   item.ImportRef,
 		Quantity:    item.Quantity,
 		CreatedAt:   item.CreatedAt,
 		UpdatedAt:   item.UpdatedAt,
@@ -285,6 +293,10 @@ func (e *ItemsRepository) CheckRef(ctx context.Context, GID uuid.UUID, ref strin
 	return q.Where(item.ImportRef(ref)).Exist(ctx)
 }
 
+func (e *ItemsRepository) GetByRef(ctx context.Context, GID uuid.UUID, ref string) (ItemOut, error) {
+	return e.getOne(ctx, item.ImportRef(ref), item.HasGroupWith(group.ID(GID)))
+}
+
 // GetOneByGroup returns a single item by ID. If the item does not exist, an error is returned.
 // GetOneByGroup ensures that the item belongs to a specific group.
 func (e *ItemsRepository) GetOneByGroup(ctx context.Context, gid, id uuid.UUID) (ItemOut, error) {
@@ -628,6 +640,44 @@ func (e *ItemsRepository) UpdateByGroup(ctx context.Context, GID uuid.UUID, data
 	return e.GetOne(ctx, data.ID)
 }
 
+func (e *ItemsRepository) GetAllZeroImportRef(ctx context.Context, GID uuid.UUID) ([]uuid.UUID, error) {
+	var ids []uuid.UUID
+
+	err := e.db.Item.Query().
+		Where(
+			item.HasGroupWith(group.ID(GID)),
+			item.Or(
+				item.ImportRefEQ(""),
+				item.ImportRefIsNil(),
+			),
+		).
+		Select(item.FieldID).
+		Scan(ctx, &ids)
+	if err != nil {
+		return nil, err
+	}
+
+	return ids, nil
+}
+
+func (e *ItemsRepository) Patch(ctx context.Context, GID, ID uuid.UUID, data ItemPatch) error {
+	q := e.db.Item.Update().
+		Where(
+			item.ID(ID),
+			item.HasGroupWith(group.ID(GID)),
+		)
+
+	if data.ImportRef != nil {
+		q.SetImportRef(*data.ImportRef)
+	}
+
+	if data.Quantity != nil {
+		q.SetQuantity(*data.Quantity)
+	}
+
+	return q.Exec(ctx)
+}
+
 func (e *ItemsRepository) GetAllCustomFieldValues(ctx context.Context, GID uuid.UUID, name string) ([]string, error) {
 	type st struct {
 		Value string `json:"text_value"`
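Patch is the narrow counterpart to UpdateByGroup: only the non-nil fields of ItemPatch are applied, which is what lets EnsureImportRef touch import_ref without clobbering anything else. A minimal sketch of a caller (context, repository, and IDs are assumed to come from existing wiring):

```go
package demo

import (
	"context"

	"github.com/google/uuid"
	"github.com/hay-kot/homebox/backend/internal/data/repo"
)

// backfillRef patches a single field; Quantity stays nil and is untouched.
func backfillRef(ctx context.Context, items *repo.ItemsRepository, gid, itemID uuid.UUID) error {
	ref := uuid.New().String()[0:8]
	return items.Patch(ctx, gid, itemID, repo.ItemPatch{ImportRef: &ref})
}
```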
@@ -16,9 +16,7 @@ func getPrevMonth(now time.Time) time.Time {
 	// avoid infinite loop
 	max := 15
 	for t.Month() == now.Month() {
-		println("month is the same")
 		t = t.AddDate(0, 0, -1)
-		println(t.String())
 
 		max--
 		if max == 0 {
@@ -2,7 +2,6 @@ package repo
 
 import (
 	"context"
-	"fmt"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -81,7 +80,6 @@ func TestUserRepo_GetAll(t *testing.T) {
 	assert.Equal(t, len(created), len(allUsers))
 
 	for _, usr := range created {
-		fmt.Printf("%+v\n", usr)
 		for _, usr2 := range allUsers {
 			if usr.ID == usr2.ID {
 				assert.Equal(t, usr.Email, usr2.Email)
@@ -2,7 +2,6 @@ package types
 
 import (
 	"errors"
-	"fmt"
 	"strings"
 	"time"
 )
@@ -74,9 +73,7 @@ func (d Date) MarshalJSON() ([]byte, error) {
 func (d *Date) UnmarshalJSON(data []byte) (err error) {
 	// unescape the string if necessary `\"` -> `"`
 	str := strings.Trim(string(data), "\"")
-	fmt.Printf("str: %q\n", str)
 	if str == "" || str == "null" || str == `""` {
-		println("empty date")
 		*d = Date{}
 		return nil
 	}
@@ -4,56 +4,80 @@
 
 Using the CSV import is the recommended way for adding items to the database. It is always going to be the fastest way to import any large amount of items and provides the most flexibility when it comes to adding items.
 
-**Limitations**
+**Current Limitations**
 
-- Currently only supports importing items, locations, and labels
-- Does not support attachments. Attachments must be uploaded after import
+- Imports only support importing items, locations, and labels
+- Imports and Exports do not support attachments. Attachments must be uploaded after import
+- CSV Exports do not support nested path exports (e.g. `Home / Office / Desk`) and will only export the item's direct parent (though imports _do_ support nested paths)
+- Cannot specify item-to-item relationships (e.g. `Item A` is a child of `Item B`)
 
 !!! tip "File Formats"
     The CSV import supports both CSV and TSV files. The only difference is the delimiter used. CSV files use a comma `,` as the delimiter and TSV files use a tab `\t` as the delimiter. The file extension does not matter.
 
-**Template**
-
-You can use this snippet as the headers for your CSV. Copy and paste it into your spreadsheet editor of choice and fill in the values.
-
-ImportRef Location Labels Quantity Name Description Insured Serial Number Model Number Manufacturer Notes Purchase From Purchased Price Purchased Time Lifetime Warranty Warranty Expires Warranty Details Sold To Sold Price Sold Time Sold Notes
-
-!!! tip "Column Order"
-    Column headers are just there for reference, the important thing is that the order is correct. You can change the headers to anything you like, this behavior may change in the future.
-
 ## CSV Reference
 
+Below are the supported columns. They are case sensitive, can be in any order, and can be omitted unless otherwise specified.
+
+### Special Syntax Columns
+
+`HB.import_ref`
+
+:   Import Refs are unique strings that can be used to deduplicate imports. Before an item is imported, we check the database for a matching ref. If the ref exists, we skip creation of that item.
+
+    * String Type
+    * Max 100 Characters
+
+    Import Refs are used to de-duplicate imports. It is HIGHLY recommended that you use them to manage your items if you intend to manage your inventory via CSV import/export. If you do not use import refs, you will end up with duplicate items in your database on subsequent imports.
+
+    !!! tip
+
+        Specifying import refs also allows you to update existing items via the CSV import. If you specify an import ref that already exists in the database, we will update the existing item instead of creating a new one.
+
+`HB.location`
+
+:   This is the location of the item that will be created. These are de-duplicated and won't create another instance when reused.
+
+    * Supports Path Separators for nested locations (e.g. `Home / Office / Desk`)
+
+`HB.labels`
+
+:   List of labels to apply to the item, separated by a `;`; can be existing or new labels.
+
+`HB.field.{field_name}` (e.g. `HB.field.Serial Number`)
+
+:   This is a special column that allows you to add custom fields to the item. The column name must start with `HB.field.` followed by the name of the field. The value of the column will be the value of the field.
+
+    - If the cell value is empty, it will be ignored.
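Putting the special columns together, a hypothetical header row plus one item (all values invented for illustration) that creates a nested location, two labels, and one custom field:

```csv
HB.import_ref,HB.location,HB.labels,HB.name,HB.quantity,HB.field.Voltage
f3a91c2e,Home / Garage / Shelf A,IOT;Z-Wave,Zooz Universal Relay ZEN17,1,120V
```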
+### Standard Columns
+
 | Column | Type | Description |
-| ----------------- | -------------------- | ------------------------------------------------------------------------------------------------- |
-| ImportRef | String (100) | Import Refs are unique strings that can be used to deduplicate imports. Before an item is imported, we check the database for a matching ref. If the ref exists, we skip that item. |
-| Location | String | This is the location of the item that will be created. These are de-duplicated and won't create another instance when reused. |
-| Labels | `;` Separated String | List of labels to apply to the item separated by a `;`, can be existing or new |
-| Quantity | Integer | The quantity of items to create |
-| Name | String | Name of the item |
-| Description | String | Description of the item |
-| Insured | Boolean | Whether or not the item is insured |
-| Serial Number | String | Serial number of the item |
-| Model Number | String | Model of the item |
-| Manufacturer | String | Manufacturer of the item |
-| Notes | String (1000) | General notes about the product |
-| Purchase From | String | Name of the place the item was purchased from |
-| Purchase Price | Float64 | |
-| Purchase At | Date | Date the item was purchased |
-| Lifetime Warranty | Boolean | true or false - case insensitive |
-| Warranty Expires | Date | Date the warranty expires |
-| Warranty Details | String | Details about the warranty |
-| Sold To | String | Name of the person the item was sold to |
-| Sold At | Date | Date the item was sold |
-| Sold Price | Float64 | |
-| Sold Notes | String (1000) | |
+| -------------------- | ------------- | --------------------------------------------- |
+| HB.quantity | Integer | The quantity of items to create |
+| HB.name | String | Name of the item |
+| HB.asset_id | AssetID | Asset ID for the item |
+| HB.description | String | Description of the item |
+| HB.insured | Boolean | Whether or not the item is insured |
+| HB.serial_number | String | Serial number of the item |
+| HB.model_number | String | Model of the item |
+| HB.manufacturer | String | Manufacturer of the item |
+| HB.notes | String (1000) | General notes about the product |
+| HB.purchase_from | String | Name of the place the item was purchased from |
+| HB.purchase_price | Float64 | |
+| HB.purchase_at | Date | Date the item was purchased |
+| HB.lifetime_warranty | Boolean | true or false - case insensitive |
+| HB.warranty_expires | Date | Date the warranty expires |
+| HB.warranty_details | String | Details about the warranty |
+| HB.sold_to | String | Name of the person the item was sold to |
+| HB.sold_at | Date | Date the item was sold |
+| HB.sold_price | Float64 | |
+| HB.sold_notes | String (1000) | |
||||||
**Type Key**
|
**Type Key**
|
||||||
|
|
||||||
| Type | Format |
|
| Type | Format |
|
||||||
| ------- | --------------------------------------------------- |
|
| ------- | --------------------------------------------------- |
|
||||||
| String | Max 255 Characters unless otherwise specified |
|
| String | Max 255 Characters unless otherwise specified |
|
||||||
| Date | MM/DD/YYYY |
|
| Date | YYYY-MM-DD |
|
||||||
| Boolean | true or false, yes or no, 1 or 0 - case insensitive |
|
| Boolean | true or false, yes or no, 1 or 0 - case insensitive |
|
||||||
|
| AssetID | 000-000 |
|
||||||
|
|
|
@@ -44,12 +44,11 @@ markdown_extensions:
       custom_checkbox: true
   - admonition
   - attr_list
-  - pymdownx.tabbed
   - pymdownx.superfences
 
 nav:
   - Home: index.md
   - Quick Start: quick-start.md
   - Tips and Tricks: tips-tricks.md
-  - Importing Data: import-csv.md
+  - Import and Export: import-csv.md
   - Building The Binary: build.md
@@ -5,6 +5,27 @@
       Import a CSV file containing your items, labels, and locations. See documentation for more information on the
       required format.
     </p>
+    <div class="alert alert-warning shadow-lg mt-4">
+      <div>
+        <svg
+          xmlns="http://www.w3.org/2000/svg"
+          class="stroke-current flex-shrink-0 h-6 w-6 mb-auto"
+          fill="none"
+          viewBox="0 0 24 24"
+        >
+          <path
+            stroke-linecap="round"
+            stroke-linejoin="round"
+            stroke-width="2"
+            d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
+          />
+        </svg>
+        <span class="text-sm">
+          Behavior for imports with existing import_refs has changed. If an import_ref is present in the CSV file, the
+          item will be updated with the values in the CSV file.
+        </span>
+      </div>
+    </div>
+
     <form @submit.prevent="submitCsvFile">
       <div class="flex flex-col gap-2 py-6">
@ -4,27 +4,27 @@ import { UserClient } from "../../user";
|
||||||
import { factories } from "../factories";
|
import { factories } from "../factories";
|
||||||
|
|
||||||
type ImportObj = {
|
type ImportObj = {
|
||||||
ImportRef: string;
|
[`HB.import_ref`]: string;
|
||||||
Location: string;
|
[`HB.location`]: string;
|
||||||
Labels: string;
|
[`HB.labels`]: string;
|
||||||
Quantity: string;
|
[`HB.quantity`]: number;
|
||||||
Name: string;
|
[`HB.name`]: string;
|
||||||
Description: string;
|
[`HB.description`]: string;
|
||||||
Insured: boolean;
|
[`HB.insured`]: boolean;
|
||||||
SerialNumber: string;
|
[`HB.serial_number`]: string;
|
||||||
ModelNumber: string;
|
[`HB.model_number`]: string;
|
||||||
Manufacturer: string;
|
[`HB.manufacturer`]: string;
|
||||||
Notes: string;
|
[`HB.notes`]: string;
|
||||||
PurchaseFrom: string;
|
[`HB.purchase_price`]: number;
|
||||||
PurchasedPrice: number;
|
[`HB.purchase_from`]: string;
|
||||||
PurchasedTime: string;
|
[`HB.purchase_time`]: string;
|
||||||
LifetimeWarranty: boolean;
|
[`HB.lifetime_warranty`]: boolean;
|
||||||
WarrantyExpires: string;
|
[`HB.warranty_expires`]: string;
|
||||||
WarrantyDetails: string;
|
[`HB.warranty_details`]: string;
|
||||||
SoldTo: string;
|
[`HB.sold_to`]: string;
|
||||||
SoldPrice: number;
|
[`HB.sold_price`]: number;
|
||||||
SoldTime: string;
|
[`HB.sold_time`]: string;
|
||||||
SoldNotes: string;
|
[`HB.sold_notes`]: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
function toCsv(data: ImportObj[]): string {
|
function toCsv(data: ImportObj[]): string {
|
||||||
|
@ -36,7 +36,7 @@ function toCsv(data: ImportObj[]): string {
|
||||||
}
|
}
|
||||||
|
|
||||||
function importFileGenerator(entries: number): ImportObj[] {
|
function importFileGenerator(entries: number): ImportObj[] {
|
||||||
const imports: ImportObj[] = [];
|
const imports: Partial<ImportObj>[] = [];
|
||||||
|
|
||||||
const pick = (arr: string[]) => arr[Math.floor(Math.random() * arr.length)];
|
const pick = (arr: string[]) => arr[Math.floor(Math.random() * arr.length)];
|
||||||
|
|
||||||
|
@ -45,37 +45,41 @@ function importFileGenerator(entries: number): ImportObj[] {
|
||||||
|
|
||||||
const half = Math.floor(entries / 2);
|
const half = Math.floor(entries / 2);
|
||||||
|
|
||||||
|
// YYYY-MM-DD
|
||||||
|
const formatDate = (date: Date) => date.toISOString().split("T")[0];
|
||||||
|
|
||||||
for (let i = 0; i < entries; i++) {
|
for (let i = 0; i < entries; i++) {
|
||||||
imports.push({
|
imports.push({
|
||||||
ImportRef: faker.database.mongodbObjectId(),
|
[`HB.import_ref`]: faker.database.mongodbObjectId(),
|
||||||
Location: pick(locations),
|
[`HB.location`]: pick(locations),
|
||||||
Labels: labels,
|
[`HB.labels`]: labels,
|
||||||
Quantity: faker.random.numeric(1),
|
[`HB.quantity`]: Number(faker.random.numeric(2)),
|
||||||
Name: faker.random.words(3),
|
[`HB.name`]: faker.random.words(3),
|
||||||
Description: "",
|
[`HB.description`]: "",
|
||||||
Insured: faker.datatype.boolean(),
|
[`HB.insured`]: faker.datatype.boolean(),
|
||||||
SerialNumber: faker.random.alphaNumeric(5),
|
[`HB.serial_number`]: faker.random.alphaNumeric(5),
|
||||||
ModelNumber: faker.random.alphaNumeric(5),
|
[`HB.model_number`]: faker.random.alphaNumeric(5),
|
||||||
Manufacturer: faker.random.alphaNumeric(5),
|
[`HB.manufacturer`]: faker.random.alphaNumeric(5),
|
||||||
Notes: "",
|
[`HB.notes`]: "",
|
||||||
PurchaseFrom: faker.name.fullName(),
|
[`HB.purchase_from`]: faker.name.fullName(),
|
||||||
PurchasedPrice: faker.datatype.number(100),
|
[`HB.purchase_price`]: faker.datatype.number(100),
|
||||||
PurchasedTime: faker.date.past().toDateString(),
|
[`HB.purchase_time`]: faker.date.past().toDateString(),
|
||||||
LifetimeWarranty: half > i,
|
[`HB.lifetime_warranty`]: half > i,
|
||||||
WarrantyExpires: faker.date.future().toDateString(),
|
[`HB.warranty_details`]: "",
|
||||||
WarrantyDetails: "",
|
[`HB.sold_to`]: faker.name.fullName(),
|
||||||
SoldTo: faker.name.fullName(),
|
[`HB.sold_price`]: faker.datatype.number(100),
|
||||||
SoldPrice: faker.datatype.number(100),
|
[`HB.sold_time`]: formatDate(faker.date.past()),
|
||||||
SoldTime: faker.date.past().toDateString(),
|
[`HB.sold_notes`]: "",
|
||||||
SoldNotes: "",
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return imports;
|
return imports as ImportObj[];
|
||||||
}
|
}
|
||||||
|
|
||||||
describe("group related statistics tests", () => {
|
describe("group related statistics tests", () => {
|
||||||
const TOTAL_ITEMS = 30;
|
const TOTAL_ITEMS = 30;
|
||||||
|
const labelData: Record<string, number> = {};
|
||||||
|
const locationData: Record<string, number> = {};
|
||||||
|
|
||||||
let tAPI: UserClient | undefined;
|
let tAPI: UserClient | undefined;
|
||||||
const imports = importFileGenerator(TOTAL_ITEMS);
|
const imports = importFileGenerator(TOTAL_ITEMS);
|
||||||
|
@@ -97,10 +101,26 @@ describe("group related statistics tests", () => {
     const setupResp = await client.items.import(new Blob([csv], { type: "text/csv" }));

     expect(setupResp.status).toBe(204);

+    for (const item of imports) {
+      const labels = item[`HB.labels`].split(";");
+      for (const label of labels) {
+        if (labelData[label]) {
+          labelData[label] += item[`HB.purchase_price`];
+        } else {
+          labelData[label] = item[`HB.purchase_price`];
+        }
+      }
+
+      const location = item[`HB.location`];
+      if (locationData[location]) {
+        locationData[location] += item[`HB.purchase_price`];
+      } else {
+        locationData[location] = item[`HB.purchase_price`];
+      }
+    }
   });

   // Write to file system for debugging
   // fs.writeFileSync("test.csv", csv);

   test("Validate Group Statistics", async () => {
     const { status, data } = await api().stats.group();
     expect(status).toBe(200);
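The expected label and location totals are accumulated with explicit if/else branches above; the code removed in the next hunk used the `|| 0` fallback idiom for the same job. An equivalent compact form, shown only for comparison and not part of the commit:

```ts
// Behavioral sketch: same accumulation via nullish coalescing.
for (const item of imports) {
  for (const label of item[`HB.labels`].split(";")) {
    labelData[label] = (labelData[label] ?? 0) + item[`HB.purchase_price`];
  }
  const location = item[`HB.location`];
  locationData[location] = (locationData[location] ?? 0) + item[`HB.purchase_price`];
}
```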
@@ -112,17 +132,6 @@ describe("group related statistics tests", () => {
     expect(data.totalWithWarranty).toEqual(Math.floor(TOTAL_ITEMS / 2));
   });

-  const labelData: Record<string, number> = {};
-  const locationData: Record<string, number> = {};
-
-  for (const item of imports) {
-    for (const label of item.Labels.split(";")) {
-      labelData[label] = (labelData[label] || 0) + item.PurchasedPrice;
-    }
-
-    locationData[item.Location] = (locationData[item.Location] || 0) + item.PurchasedPrice;
-  }
-
   test("Validate Labels Statistics", async () => {
     const { status, data } = await api().stats.labels();
     expect(status).toBe(200);
@@ -13,4 +13,10 @@ export class ActionsAPI extends BaseAPI {
       url: route("/actions/zero-item-time-fields"),
     });
   }
+
+  ensureImportRefs() {
+    return this.http.post<void, ActionAmountResult>({
+      url: route("/actions/ensure-import-refs"),
+    });
+  }
 }
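The new `ensureImportRefs` action posts to `/actions/ensure-import-refs` and resolves to an `ActionAmountResult`. A sketch of calling it from client code, assuming an authenticated client named `api` as in the tests:

```ts
// Hypothetical caller: trigger the action and report how many items were touched.
const result = await api.actions.ensureImportRefs();
if (!result.error) {
  console.log(`${result.data.completed} items received an import_ref`);
}
```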
@@ -45,10 +45,10 @@
         Imports the standard CSV format for Homebox. This will <b>not</b> overwrite any existing items in your
         inventory. It will only add new items.
       </DetailAction>
-      <!-- <DetailAction>
+      <DetailAction @action="getExportTSV()">
         <template #title>Export Inventory</template>
         Exports the standard CSV format for Homebox. This will export all items in your inventory.
-      </DetailAction> -->
+      </DetailAction>
     </div>
   </BaseCard>
   <BaseCard>
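The import action described above is exercised end-to-end in the test suite; a sketch of hitting the same endpoint from code, mirroring the test setup (the `rows` and `toCsv` names are assumptions carried over from the earlier sketch):

```ts
// Post HB.-prefixed CSV to the importer; existing items are left untouched.
const csv = toCsv(rows);
const resp = await client.items.import(new Blob([csv], { type: "text/csv" }));
// A 204 response indicates the new items were created.
```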
@@ -68,6 +68,11 @@
         current asset_id field in the database and applying the next value to each item that has an unset asset_id
         field. This is done in order of the created_at field.
       </DetailAction>
+      <DetailAction @action="ensureImportRefs">
+        <template #title>Ensures Import Refs</template>
+        Ensures that all items in your inventory have a valid import_ref field. This is done by randomly generating
+        an 8 character string for each item that has an unset import_ref field.
+      </DetailAction>
       <DetailAction @click="resetItemDateTimes">
         <template #title> Zero Item Date Times</template>
         Resets the time value for all date time fields in your inventory to the beginning of the date. This is to
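The server-side generation of import refs is not part of this hunk; a minimal TypeScript sketch of the behavior the description promises, an 8-character random string for each item with an unset import_ref:

```ts
// Illustrative only: an 8-character alphanumeric ref, as the action description states.
function randomImportRef(): string {
  const chars = "abcdefghijklmnopqrstuvwxyz0123456789";
  let ref = "";
  for (let i = 0; i < 8; i++) {
    ref += chars[Math.floor(Math.random() * chars.length)];
  }
  return ref;
}
```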
@@ -103,7 +108,13 @@
   const notify = useNotifier();

   function getBillOfMaterials() {
-    api.reports.billOfMaterialsURL();
+    const url = api.reports.billOfMaterialsURL();
+    window.open(url, "_blank");
+  }
+
+  function getExportTSV() {
+    const url = api.items.exportURL();
+    window.open(url, "_blank");
   }

   async function ensureAssetIDs() {
@@ -125,6 +136,25 @@
     notify.success(`${result.data.completed} assets have been updated.`);
   }

+  async function ensureImportRefs() {
+    const { isCanceled } = await confirm.open(
+      "Are you sure you want to ensure all assets have an import_ref? This can take a while and cannot be undone."
+    );
+
+    if (isCanceled) {
+      return;
+    }
+
+    const result = await api.actions.ensureImportRefs();
+
+    if (result.error) {
+      notify.error("Failed to ensure import refs.");
+      return;
+    }
+
+    notify.success(`${result.data.completed} assets have been updated.`);
+  }
+
   async function resetItemDateTimes() {
     const { isCanceled } = await confirm.open(
       "Are you sure you want to reset all date and time values? This can take a while and cannot be undone."