diff --git a/backend/app/api/demo.go b/backend/app/api/demo.go
index 538655d..6db2ba1 100644
--- a/backend/app/api/demo.go
+++ b/backend/app/api/demo.go
@@ -2,7 +2,6 @@ package main
import (
"context"
- "encoding/csv"
"strings"
"github.com/hay-kot/homebox/backend/internal/core/services"
@@ -19,13 +18,11 @@ func (a *app) SetupDemo() {
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
`
- var (
- registration = services.UserRegistration{
- Email: "demo@example.com",
- Name: "Demo",
- Password: "demo",
- }
- )
+ registration := services.UserRegistration{
+ Email: "demo@example.com",
+ Name: "Demo",
+ Password: "demo",
+ }
// First check if we've already setup a demo user and skip if so
_, err := a.services.User.Login(context.Background(), registration.Email, registration.Password)
@@ -42,17 +39,7 @@ func (a *app) SetupDemo() {
token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password)
self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
- // Read CSV Text
- reader := csv.NewReader(strings.NewReader(csvText))
- reader.Comma = ','
-
- records, err := reader.ReadAll()
- if err != nil {
- log.Err(err).Msg("Failed to read CSV")
- log.Fatal().Msg("Failed to setup demo")
- }
-
- _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, records)
+ _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText))
if err != nil {
log.Err(err).Msg("Failed to import CSV")
log.Fatal().Msg("Failed to setup demo")
diff --git a/backend/app/api/handlers/v1/v1_ctrl_items.go b/backend/app/api/handlers/v1/v1_ctrl_items.go
index b82d2f3..51c7fa3 100644
--- a/backend/app/api/handlers/v1/v1_ctrl_items.go
+++ b/backend/app/api/handlers/v1/v1_ctrl_items.go
@@ -2,6 +2,7 @@ package v1
import (
"database/sql"
+ "encoding/csv"
"errors"
"net/http"
"strings"
@@ -255,15 +256,9 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
- data, err := services.ReadCsv(file)
- if err != nil {
- log.Err(err).Msg("failed to read csv")
- return validate.NewRequestError(err, http.StatusInternalServerError)
- }
-
user := services.UseUserCtx(r.Context())
- _, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, data)
+ _, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, file)
if err != nil {
log.Err(err).Msg("failed to import items")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -272,3 +267,26 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
return server.Respond(w, http.StatusNoContent, nil)
}
}
+
+// HandleItemsExport godocs
+// @Summary exports items from the database
+// @Tags Items
+// @Success 200 {string} string "text/tsv"
+// @Router /v1/items/export [GET]
+// @Security Bearer
+func (ctrl *V1Controller) HandleItemsExport() server.HandlerFunc {
+ return func(w http.ResponseWriter, r *http.Request) error {
+ ctx := services.NewContext(r.Context())
+
+ csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
+ if err != nil {
+ log.Err(err).Msg("failed to export items")
+ return validate.NewRequestError(err, http.StatusInternalServerError)
+ }
+
+ w.Header().Set("Content-Type", "text/tsv")
+ w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
+ writer := csv.NewWriter(w)
+ return writer.WriteAll(csvData)
+ }
+}
diff --git a/backend/app/api/handlers/v1/v1_ctrl_reporting.go b/backend/app/api/handlers/v1/v1_ctrl_reporting.go
index 09f2ae6..f098c25 100644
--- a/backend/app/api/handlers/v1/v1_ctrl_reporting.go
+++ b/backend/app/api/handlers/v1/v1_ctrl_reporting.go
@@ -24,8 +24,8 @@ func (ctrl *V1Controller) HandleBillOfMaterialsExport() server.HandlerFunc {
return err
}
- w.Header().Set("Content-Type", "text/csv")
- w.Header().Set("Content-Disposition", "attachment; filename=bom.csv")
+ w.Header().Set("Content-Type", "text/tsv")
+ w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
_, err = w.Write(csv)
return err
}
diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go
index e995fa4..638a537 100644
--- a/backend/app/api/routes.go
+++ b/backend/app/api/routes.go
@@ -106,6 +106,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
a.server.Get(v1Base("/items"), v1Ctrl.HandleItemsGetAll(), userMW...)
a.server.Post(v1Base("/items"), v1Ctrl.HandleItemsCreate(), userMW...)
a.server.Post(v1Base("/items/import"), v1Ctrl.HandleItemsImport(), userMW...)
+ a.server.Get(v1Base("/items/export"), v1Ctrl.HandleItemsExport(), userMW...)
a.server.Get(v1Base("/items/fields"), v1Ctrl.HandleGetAllCustomFieldNames(), userMW...)
a.server.Get(v1Base("/items/fields/values"), v1Ctrl.HandleGetAllCustomFieldValues(), userMW...)
diff --git a/backend/app/api/static/docs/docs.go b/backend/app/api/static/docs/docs.go
index 8ceb1cf..56802cc 100644
--- a/backend/app/api/static/docs/docs.go
+++ b/backend/app/api/static/docs/docs.go
@@ -407,6 +407,27 @@ const docTemplate = `{
}
}
},
+ "/v1/items/export": {
+ "get": {
+ "security": [
+ {
+ "Bearer": []
+ }
+ ],
+ "tags": [
+ "Items"
+ ],
+        "summary": "exports items from the database",
+ "responses": {
+ "200": {
+            "description": "text/tsv",
+ "schema": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ },
"/v1/items/fields": {
"get": {
"security": [
diff --git a/backend/app/api/static/docs/swagger.json b/backend/app/api/static/docs/swagger.json
index 3808fb5..51a7f5b 100644
--- a/backend/app/api/static/docs/swagger.json
+++ b/backend/app/api/static/docs/swagger.json
@@ -399,6 +399,27 @@
}
}
},
+ "/v1/items/export": {
+ "get": {
+ "security": [
+ {
+ "Bearer": []
+ }
+ ],
+ "tags": [
+ "Items"
+ ],
+        "summary": "exports items from the database",
+ "responses": {
+ "200": {
+            "description": "text/tsv",
+ "schema": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ },
"/v1/items/fields": {
"get": {
"security": [
diff --git a/backend/app/api/static/docs/swagger.yaml b/backend/app/api/static/docs/swagger.yaml
index c461867..23492f6 100644
--- a/backend/app/api/static/docs/swagger.yaml
+++ b/backend/app/api/static/docs/swagger.yaml
@@ -1109,6 +1109,18 @@ paths:
summary: Update Maintenance Entry
tags:
- Maintenance
+ /v1/items/export:
+ get:
+ responses:
+ "200":
+          description: text/tsv
+ schema:
+ type: string
+ security:
+ - Bearer: []
+      summary: exports items from the database
+ tags:
+ - Items
/v1/items/fields:
get:
produces:
diff --git a/backend/internal/core/services/.testdata/import.csv b/backend/internal/core/services/reporting/.testdata/import.csv
similarity index 100%
rename from backend/internal/core/services/.testdata/import.csv
rename to backend/internal/core/services/reporting/.testdata/import.csv
diff --git a/backend/internal/core/services/.testdata/import.tsv b/backend/internal/core/services/reporting/.testdata/import.tsv
similarity index 100%
rename from backend/internal/core/services/.testdata/import.tsv
rename to backend/internal/core/services/reporting/.testdata/import.tsv
diff --git a/backend/internal/core/services/reporting/.testdata/import/fields.csv b/backend/internal/core/services/reporting/.testdata/import/fields.csv
new file mode 100644
index 0000000..28c3c17
--- /dev/null
+++ b/backend/internal/core/services/reporting/.testdata/import/fields.csv
@@ -0,0 +1,5 @@
+HB.location,HB.name,HB.quantity,HB.description,HB.field.Custom Field 1,HB.field.Custom Field 2,HB.field.Custom Field 3
+loc,Item 1,1,Description 1,Value 1[1],Value 1[2],Value 1[3]
+loc,Item 2,2,Description 2,Value 2[1],Value 2[2],Value 2[3]
+loc,Item 3,3,Description 3,Value 3[1],Value 3[2],Value 3[3]
+
diff --git a/backend/internal/core/services/reporting/.testdata/import/minimal.csv b/backend/internal/core/services/reporting/.testdata/import/minimal.csv
new file mode 100644
index 0000000..be39ad2
--- /dev/null
+++ b/backend/internal/core/services/reporting/.testdata/import/minimal.csv
@@ -0,0 +1,4 @@
+HB.location,HB.name,HB.quantity,HB.description
+loc,Item 1,1,Description 1
+loc,Item 2,2,Description 2
+loc,Item 3,3,Description 3
\ No newline at end of file
diff --git a/backend/internal/core/services/reporting/.testdata/import/types.csv b/backend/internal/core/services/reporting/.testdata/import/types.csv
new file mode 100644
index 0000000..96ff236
--- /dev/null
+++ b/backend/internal/core/services/reporting/.testdata/import/types.csv
@@ -0,0 +1,4 @@
+HB.name,HB.asset_id,HB.location,HB.labels
+Item 1,1,Path / To / Location 1,L1 ; L2 ; L3
+Item 2,000-002,Path /To/ Location 2,L1;L2;L3
+Item 3,1000-003,Path / To /Location 3 , L1;L2; L3
\ No newline at end of file
diff --git a/backend/internal/core/services/reporting/bill_of_materials.go b/backend/internal/core/services/reporting/bill_of_materials.go
new file mode 100644
index 0000000..2cded8e
--- /dev/null
+++ b/backend/internal/core/services/reporting/bill_of_materials.go
@@ -0,0 +1,50 @@
+package reporting
+
+import (
+ "context"
+
+ "github.com/gocarina/gocsv"
+ "github.com/google/uuid"
+ "github.com/hay-kot/homebox/backend/internal/data/types"
+)
+
+// =================================================================================================
+
+type BillOfMaterialsEntry struct {
+ PurchaseDate types.Date `csv:"Purchase Date"`
+ Name string `csv:"Name"`
+ Description string `csv:"Description"`
+ Manufacturer string `csv:"Manufacturer"`
+ SerialNumber string `csv:"Serial Number"`
+ ModelNumber string `csv:"Model Number"`
+ Quantity int `csv:"Quantity"`
+ Price float64 `csv:"Price"`
+ TotalPrice float64 `csv:"Total Price"`
+}
+
+// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
+// See BillOfMaterialsEntry for the format of the output
+func (rs *ReportingService) BillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
+ entities, err := rs.repos.Items.GetAll(ctx, GID)
+ if err != nil {
+ rs.l.Debug().Err(err).Msg("failed to get all items for BOM Csv Reporting")
+ return nil, err
+ }
+
+ bomEntries := make([]BillOfMaterialsEntry, len(entities))
+ for i, entity := range entities {
+ bomEntries[i] = BillOfMaterialsEntry{
+ PurchaseDate: entity.PurchaseTime,
+ Name: entity.Name,
+ Description: entity.Description,
+ Manufacturer: entity.Manufacturer,
+ SerialNumber: entity.SerialNumber,
+ ModelNumber: entity.ModelNumber,
+ Quantity: entity.Quantity,
+ Price: entity.PurchasePrice,
+ TotalPrice: entity.PurchasePrice * float64(entity.Quantity),
+ }
+ }
+
+ return gocsv.MarshalBytes(&bomEntries)
+}
diff --git a/backend/internal/core/services/reporting/import.go b/backend/internal/core/services/reporting/import.go
new file mode 100644
index 0000000..b608e62
--- /dev/null
+++ b/backend/internal/core/services/reporting/import.go
@@ -0,0 +1,93 @@
+package reporting
+
+import (
+ "bytes"
+ "encoding/csv"
+ "errors"
+ "io"
+ "strings"
+)
+
+var (
+ ErrNoHomeboxHeaders = errors.New("no headers found")
+ ErrMissingRequiredHeaders = errors.New("missing required headers `HB.location` or `HB.name`")
+)
+
+// determineSeparator determines the separator used in the CSV file
+// It returns the separator as a rune and an error if it could not be determined
+//
+// It is assumed that the first row is the header row and that the separator is the same
+// for all rows.
+//
+// Supported separators are `,` and `\t`
+func determineSeparator(data []byte) (rune, error) {
+ // First row
+ firstRow := bytes.Split(data, []byte("\n"))[0]
+
+ // find first comma or /t
+ comma := bytes.IndexByte(firstRow, ',')
+ tab := bytes.IndexByte(firstRow, '\t')
+
+ switch {
+ case comma == -1 && tab == -1:
+ return 0, errors.New("could not determine separator")
+ case tab > comma:
+ return '\t', nil
+ default:
+ return ',', nil
+ }
+}
+
+// readRawCsv reads a CSV file and returns the raw data as a 2D string array
+// It determines the separator used in the CSV file and returns an error if
+// it could not be determined
+func readRawCsv(r io.Reader) ([][]string, error) {
+ data, err := io.ReadAll(r)
+ if err != nil {
+ return nil, err
+ }
+
+ reader := csv.NewReader(bytes.NewReader(data))
+
+ // Determine separator
+ sep, err := determineSeparator(data)
+ if err != nil {
+ return nil, err
+ }
+
+ reader.Comma = sep
+
+ return reader.ReadAll()
+}
+
+// parseHeaders parses the homebox headers from the CSV file and returns a map of the headers
+// and their column index as well as a list of the field headers (HB.field.*) in the order
+// they appear in the CSV file
+//
+// It returns an error if no homebox headers are found
+func parseHeaders(headers []string) (hbHeaders map[string]int, fieldHeaders []string, err error) {
+ hbHeaders = map[string]int{} // initialize map
+
+ for col, h := range headers {
+ if strings.HasPrefix(h, "HB.field.") {
+ fieldHeaders = append(fieldHeaders, h)
+ }
+
+ if strings.HasPrefix(h, "HB.") {
+ hbHeaders[h] = col
+ }
+ }
+
+ required := []string{"HB.location", "HB.name"}
+ for _, h := range required {
+ if _, ok := hbHeaders[h]; !ok {
+ return nil, nil, ErrMissingRequiredHeaders
+ }
+ }
+
+ if len(hbHeaders) == 0 {
+ return nil, nil, ErrNoHomeboxHeaders
+ }
+
+ return hbHeaders, fieldHeaders, nil
+}
diff --git a/backend/internal/core/services/reporting/io_row.go b/backend/internal/core/services/reporting/io_row.go
new file mode 100644
index 0000000..dd1f58a
--- /dev/null
+++ b/backend/internal/core/services/reporting/io_row.go
@@ -0,0 +1,84 @@
+package reporting
+
+import (
+ "strings"
+
+ "github.com/hay-kot/homebox/backend/internal/data/repo"
+ "github.com/hay-kot/homebox/backend/internal/data/types"
+)
+
+type ExportItemFields struct {
+ Name string
+ Value string
+}
+
+type ExportTSVRow struct {
+ Location LocationString `csv:"HB.location"`
+ LabelStr LabelString `csv:"HB.labels"`
+ ImportRef string `csv:"HB.import_ref"`
+ AssetID repo.AssetID `csv:"HB.asset_id"`
+
+ Name string `csv:"HB.name"`
+ Quantity int `csv:"HB.quantity"`
+ Description string `csv:"HB.description"`
+ Insured bool `csv:"HB.insured"`
+ Notes string `csv:"HB.notes"`
+
+ PurchasePrice float64 `csv:"HB.purchase_price"`
+ PurchaseFrom string `csv:"HB.purchase_from"`
+ PurchaseTime types.Date `csv:"HB.purchase_time"`
+
+ Manufacturer string `csv:"HB.manufacturer"`
+ ModelNumber string `csv:"HB.model_number"`
+ SerialNumber string `csv:"HB.serial_number"`
+
+ LifetimeWarranty bool `csv:"HB.lifetime_warranty"`
+ WarrantyExpires types.Date `csv:"HB.warranty_expires"`
+ WarrantyDetails string `csv:"HB.warranty_details"`
+
+ SoldTo string `csv:"HB.sold_to"`
+ SoldPrice float64 `csv:"HB.sold_price"`
+ SoldTime types.Date `csv:"HB.sold_time"`
+ SoldNotes string `csv:"HB.sold_notes"`
+
+ Fields []ExportItemFields `csv:"-"`
+}
+
+// ============================================================================
+
+// LabelString is a string slice that is used to represent a list of labels.
+//
+// For example, a list of labels "Important; Work" would be represented as a
+// LabelString with the following values:
+//
+// LabelString{"Important", "Work"}
+type LabelString []string
+
+func parseLabelString(s string) LabelString {
+ v, _ := parseSeparatedString(s, ";")
+ return v
+}
+
+func (ls LabelString) String() string {
+ return strings.Join(ls, "; ")
+}
+
+// ============================================================================
+
+// LocationString is a string slice that is used to represent a location
+// hierarchy.
+//
+// For example, a location hierarchy of "Home / Bedroom / Desk" would be
+// represented as a LocationString with the following values:
+//
+// LocationString{"Home", "Bedroom", "Desk"}
+type LocationString []string
+
+func parseLocationString(s string) LocationString {
+ v, _ := parseSeparatedString(s, "/")
+ return v
+}
+
+func (csf LocationString) String() string {
+ return strings.Join(csf, " / ")
+}
diff --git a/backend/internal/core/services/reporting/io_row_test.go b/backend/internal/core/services/reporting/io_row_test.go
new file mode 100644
index 0000000..bcd7431
--- /dev/null
+++ b/backend/internal/core/services/reporting/io_row_test.go
@@ -0,0 +1,65 @@
+package reporting
+
+import (
+ "reflect"
+ "testing"
+)
+
+func Test_parseSeparatedString(t *testing.T) {
+ type args struct {
+ s string
+ sep string
+ }
+ tests := []struct {
+ name string
+ args args
+ want []string
+ wantErr bool
+ }{
+ {
+ name: "comma",
+ args: args{
+ s: "a,b,c",
+ sep: ",",
+ },
+ want: []string{"a", "b", "c"},
+ wantErr: false,
+ },
+ {
+ name: "trimmed comma",
+ args: args{
+ s: "a, b, c",
+ sep: ",",
+ },
+ want: []string{"a", "b", "c"},
+ },
+ {
+ name: "excessive whitespace",
+ args: args{
+ s: " a, b, c ",
+ sep: ",",
+ },
+ want: []string{"a", "b", "c"},
+ },
+ {
+ name: "empty",
+ args: args{
+ s: "",
+ sep: ",",
+ },
+ want: []string{},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := parseSeparatedString(tt.args.s, tt.args.sep)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("parseSeparatedString() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("parseSeparatedString() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/backend/internal/core/services/reporting/io_sheet.go b/backend/internal/core/services/reporting/io_sheet.go
new file mode 100644
index 0000000..4aa05df
--- /dev/null
+++ b/backend/internal/core/services/reporting/io_sheet.go
@@ -0,0 +1,302 @@
+package reporting
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/hay-kot/homebox/backend/internal/data/repo"
+ "github.com/hay-kot/homebox/backend/internal/data/types"
+)
+
+// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
+// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
+// the standard format of the file.
+//
+// See ExportTSVRow for the format of the data in the sheet.
+type IOSheet struct {
+ headers []string
+ custom []int
+ index map[string]int
+ Rows []ExportTSVRow
+}
+
+func (s *IOSheet) indexHeaders() {
+ s.index = make(map[string]int)
+
+ for i, h := range s.headers {
+ if strings.HasPrefix(h, "HB.field") {
+ s.custom = append(s.custom, i)
+ }
+
+ if strings.HasPrefix(h, "HB.") {
+ s.index[h] = i
+ }
+ }
+}
+
+func (s *IOSheet) GetColumn(str string) (col int, ok bool) {
+ if s.index == nil {
+ s.indexHeaders()
+ }
+
+ col, ok = s.index[str]
+ return
+}
+
+// Read reads a CSV/TSV and populates the "Rows" field with the data from the sheet
+// Custom Fields are supported via the `HB.field.*` headers. The `HB.field.*` the "Name"
+// of the field is the part after the `HB.field.` prefix. Additionally, Custom Fields with
+// no value are excluded from the row.Fields slice, this includes empty strings.
+//
+// Note That
+// - the first row is assumed to be the header
+// - at least 1 row of data is required
+// - rows and columns must be rectangular (i.e. all rows must have the same number of columns)
+func (s *IOSheet) Read(data io.Reader) error {
+ sheet, err := readRawCsv(data)
+ if err != nil {
+ return err
+ }
+
+ if len(sheet) < 2 {
+ return fmt.Errorf("sheet must have at least 1 row of data (header + 1)")
+ }
+
+ s.headers = sheet[0]
+ s.Rows = make([]ExportTSVRow, len(sheet)-1)
+
+ for i, row := range sheet[1:] {
+ if len(row) != len(s.headers) {
+ return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
+ }
+
+ rowData := ExportTSVRow{}
+
+ st := reflect.TypeOf(ExportTSVRow{})
+
+ for i := 0; i < st.NumField(); i++ {
+ field := st.Field(i)
+ tag := field.Tag.Get("csv")
+ if tag == "" || tag == "-" {
+ continue
+ }
+
+ col, ok := s.GetColumn(tag)
+ if !ok {
+ continue
+ }
+
+ val := row[col]
+
+ var v interface{}
+
+ switch field.Type {
+ case reflect.TypeOf(""):
+ v = val
+ case reflect.TypeOf(int(0)):
+ v = parseInt(val)
+ case reflect.TypeOf(bool(false)):
+ v = parseBool(val)
+ case reflect.TypeOf(float64(0)):
+ v = parseFloat(val)
+
+ // Custom Types
+ case reflect.TypeOf(types.Date{}):
+ v = types.DateFromString(val)
+ case reflect.TypeOf(repo.AssetID(0)):
+ v, _ = repo.ParseAssetID(val)
+ case reflect.TypeOf(LocationString{}):
+ v = parseLocationString(val)
+ case reflect.TypeOf(LabelString{}):
+ v = parseLabelString(val)
+ }
+
+ fmt.Printf("%s: %v (%T)\n", tag, v, v)
+
+ // Nil values are not allowed at the moment. This may change.
+ if v == nil {
+ return fmt.Errorf("could not convert %q to %s", val, field.Type)
+ }
+
+ ptrField := reflect.ValueOf(&rowData).Elem().Field(i)
+ ptrField.Set(reflect.ValueOf(v))
+ }
+
+ for _, col := range s.custom {
+ colName := strings.TrimPrefix(s.headers[col], "HB.field.")
+ customVal := row[col]
+ if customVal == "" {
+ continue
+ }
+
+ rowData.Fields = append(rowData.Fields, ExportItemFields{
+ Name: colName,
+ Value: customVal,
+ })
+ }
+
+ s.Rows[i] = rowData
+ }
+
+ return nil
+}
+
+// ReadItems populates the sheet's Rows and headers from a slice of items.
+func (s *IOSheet) ReadItems(items []repo.ItemOut) {
+ s.Rows = make([]ExportTSVRow, len(items))
+
+ extraHeaders := map[string]struct{}{}
+
+ for i := range items {
+ item := items[i]
+
+ // TODO: Support fetching nested locations
+ locString := LocationString{item.Location.Name}
+
+ labelString := make([]string, len(item.Labels))
+
+ for i, l := range item.Labels {
+ labelString[i] = l.Name
+ }
+
+ customFields := make([]ExportItemFields, len(item.Fields))
+
+ for i, f := range item.Fields {
+ extraHeaders[f.Name] = struct{}{}
+
+ customFields[i] = ExportItemFields{
+ Name: f.Name,
+ Value: f.TextValue,
+ }
+ }
+
+ s.Rows[i] = ExportTSVRow{
+ // fill struct
+ Location: locString,
+ LabelStr: labelString,
+
+ ImportRef: item.ImportRef,
+ AssetID: item.AssetID,
+ Name: item.Name,
+ Quantity: item.Quantity,
+ Description: item.Description,
+ Insured: item.Insured,
+
+ PurchasePrice: item.PurchasePrice,
+ PurchaseFrom: item.PurchaseFrom,
+ PurchaseTime: item.PurchaseTime,
+
+ Manufacturer: item.Manufacturer,
+ ModelNumber: item.ModelNumber,
+ SerialNumber: item.SerialNumber,
+
+ LifetimeWarranty: item.LifetimeWarranty,
+ WarrantyExpires: item.WarrantyExpires,
+ WarrantyDetails: item.WarrantyDetails,
+
+ SoldTo: item.SoldTo,
+ SoldTime: item.SoldTime,
+ SoldPrice: item.SoldPrice,
+ SoldNotes: item.SoldNotes,
+
+ Fields: customFields,
+ }
+ }
+
+ // Extract and sort additional headers for deterministic output
+ customHeaders := make([]string, 0, len(extraHeaders))
+
+ for k := range extraHeaders {
+ customHeaders = append(customHeaders, k)
+ }
+
+ sort.Strings(customHeaders)
+
+ st := reflect.TypeOf(ExportTSVRow{})
+
+ // Write headers
+ for i := 0; i < st.NumField(); i++ {
+ field := st.Field(i)
+ tag := field.Tag.Get("csv")
+ if tag == "" || tag == "-" {
+ continue
+ }
+
+ s.headers = append(s.headers, tag)
+ }
+
+ for _, h := range customHeaders {
+ s.headers = append(s.headers, "HB.field."+h)
+ }
+}
+
+// TSV renders the sheet as a 2D string slice suitable for TSV/CSV serialization.
+func (s *IOSheet) TSV() ([][]string, error) {
+ memcsv := make([][]string, len(s.Rows)+1)
+
+ memcsv[0] = s.headers
+
+	// use struct tags in rows to determine column order
+ for i, row := range s.Rows {
+ rowIdx := i + 1
+
+ memcsv[rowIdx] = make([]string, len(s.headers))
+
+ st := reflect.TypeOf(row)
+
+ for i := 0; i < st.NumField(); i++ {
+ field := st.Field(i)
+ tag := field.Tag.Get("csv")
+ if tag == "" || tag == "-" {
+ continue
+ }
+
+ col, ok := s.GetColumn(tag)
+ if !ok {
+ continue
+ }
+
+ val := reflect.ValueOf(row).Field(i)
+
+ var v string
+
+ switch field.Type {
+ case reflect.TypeOf(""):
+ v = val.String()
+ case reflect.TypeOf(int(0)):
+ v = strconv.Itoa(int(val.Int()))
+ case reflect.TypeOf(bool(false)):
+ v = strconv.FormatBool(val.Bool())
+ case reflect.TypeOf(float64(0)):
+ v = strconv.FormatFloat(val.Float(), 'f', -1, 64)
+
+ // Custom Types
+ case reflect.TypeOf(types.Date{}):
+ v = val.Interface().(types.Date).String()
+ case reflect.TypeOf(repo.AssetID(0)):
+ v = val.Interface().(repo.AssetID).String()
+ case reflect.TypeOf(LocationString{}):
+ v = val.Interface().(LocationString).String()
+ case reflect.TypeOf(LabelString{}):
+ v = val.Interface().(LabelString).String()
+ }
+
+ memcsv[rowIdx][col] = v
+ }
+
+ for _, f := range row.Fields {
+ col, ok := s.GetColumn("HB.field." + f.Name)
+ if !ok {
+ continue
+ }
+
+ memcsv[i+1][col] = f.Value
+ }
+ }
+
+ return memcsv, nil
+}
diff --git a/backend/internal/core/services/reporting/io_sheet_test.go b/backend/internal/core/services/reporting/io_sheet_test.go
new file mode 100644
index 0000000..9d7f9a0
--- /dev/null
+++ b/backend/internal/core/services/reporting/io_sheet_test.go
@@ -0,0 +1,226 @@
+package reporting
+
+import (
+ "bytes"
+ "reflect"
+ "testing"
+
+ _ "embed"
+
+ "github.com/hay-kot/homebox/backend/internal/data/repo"
+ "github.com/stretchr/testify/assert"
+)
+
+var (
+ //go:embed .testdata/import/minimal.csv
+ minimalImportCSV []byte
+
+ //go:embed .testdata/import/fields.csv
+ customFieldImportCSV []byte
+
+ //go:embed .testdata/import/types.csv
+ customTypesImportCSV []byte
+
+ //go:embed .testdata/import.csv
+ CSVData_Comma []byte
+
+ //go:embed .testdata/import.tsv
+ CSVData_Tab []byte
+)
+
+func TestSheet_Read(t *testing.T) {
+ tests := []struct {
+ name string
+ data []byte
+ want []ExportTSVRow
+ wantErr bool
+ }{
+ {
+ name: "minimal import",
+ data: minimalImportCSV,
+ want: []ExportTSVRow{
+ {Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
+ {Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
+ {Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
+ },
+ },
+ {
+ name: "custom field import",
+ data: customFieldImportCSV,
+ want: []ExportTSVRow{
+ {
+ Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
+ Fields: []ExportItemFields{
+ {Name: "Custom Field 1", Value: "Value 1[1]"},
+ {Name: "Custom Field 2", Value: "Value 1[2]"},
+ {Name: "Custom Field 3", Value: "Value 1[3]"},
+ },
+ },
+ {
+ Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2",
+ Fields: []ExportItemFields{
+ {Name: "Custom Field 1", Value: "Value 2[1]"},
+ {Name: "Custom Field 2", Value: "Value 2[2]"},
+ {Name: "Custom Field 3", Value: "Value 2[3]"},
+ },
+ },
+ {
+ Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3",
+ Fields: []ExportItemFields{
+ {Name: "Custom Field 1", Value: "Value 3[1]"},
+ {Name: "Custom Field 2", Value: "Value 3[2]"},
+ {Name: "Custom Field 3", Value: "Value 3[3]"},
+ },
+ },
+ },
+ },
+ {
+ name: "custom types import",
+ data: customTypesImportCSV,
+ want: []ExportTSVRow{
+ {
+ Name: "Item 1",
+ AssetID: repo.AssetID(1),
+ Location: LocationString{"Path", "To", "Location 1"},
+ LabelStr: LabelString{"L1", "L2", "L3"},
+ },
+ {
+ Name: "Item 2",
+ AssetID: repo.AssetID(2),
+ Location: LocationString{"Path", "To", "Location 2"},
+ LabelStr: LabelString{"L1", "L2", "L3"},
+ },
+ {
+ Name: "Item 3",
+ AssetID: repo.AssetID(1000003),
+ Location: LocationString{"Path", "To", "Location 3"},
+ LabelStr: LabelString{"L1", "L2", "L3"},
+ },
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ reader := bytes.NewReader(tt.data)
+
+ sheet := &IOSheet{}
+ err := sheet.Read(reader)
+
+ switch {
+ case tt.wantErr:
+ assert.Error(t, err)
+ default:
+ assert.NoError(t, err)
+ assert.ElementsMatch(t, tt.want, sheet.Rows)
+ }
+ })
+ }
+}
+
+func Test_parseHeaders(t *testing.T) {
+ tests := []struct {
+ name string
+ rawHeaders []string
+ wantHbHeaders map[string]int
+ wantFieldHeaders []string
+ wantErr bool
+ }{
+ {
+			name:             "no homebox headers",
+ rawHeaders: []string{"Header 1", "Header 2", "Header 3"},
+ wantHbHeaders: nil,
+ wantFieldHeaders: nil,
+ wantErr: true,
+ },
+ {
+ name: "field headers only",
+ rawHeaders: []string{"HB.location", "HB.name", "HB.field.1", "HB.field.2", "HB.field.3"},
+ wantHbHeaders: map[string]int{
+ "HB.location": 0,
+ "HB.name": 1,
+ "HB.field.1": 2,
+ "HB.field.2": 3,
+ "HB.field.3": 4,
+ },
+ wantFieldHeaders: []string{"HB.field.1", "HB.field.2", "HB.field.3"},
+ wantErr: false,
+ },
+ {
+ name: "mixed headers",
+ rawHeaders: []string{"Header 1", "HB.name", "Header 2", "HB.field.2", "Header 3", "HB.field.3", "HB.location"},
+ wantHbHeaders: map[string]int{
+ "HB.name": 1,
+ "HB.field.2": 3,
+ "HB.field.3": 5,
+ "HB.location": 6,
+ },
+ wantFieldHeaders: []string{"HB.field.2", "HB.field.3"},
+ wantErr: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotHbHeaders, gotFieldHeaders, err := parseHeaders(tt.rawHeaders)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("parseHeaders() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if !reflect.DeepEqual(gotHbHeaders, tt.wantHbHeaders) {
+ t.Errorf("parseHeaders() gotHbHeaders = %v, want %v", gotHbHeaders, tt.wantHbHeaders)
+ }
+ if !reflect.DeepEqual(gotFieldHeaders, tt.wantFieldHeaders) {
+ t.Errorf("parseHeaders() gotFieldHeaders = %v, want %v", gotFieldHeaders, tt.wantFieldHeaders)
+ }
+ })
+ }
+}
+
+func Test_determineSeparator(t *testing.T) {
+ type args struct {
+ data []byte
+ }
+ tests := []struct {
+ name string
+ args args
+ want rune
+ wantErr bool
+ }{
+ {
+ name: "comma",
+ args: args{
+ data: CSVData_Comma,
+ },
+ want: ',',
+ wantErr: false,
+ },
+ {
+ name: "tab",
+ args: args{
+ data: CSVData_Tab,
+ },
+ want: '\t',
+ wantErr: false,
+ },
+ {
+ name: "invalid",
+ args: args{
+ data: []byte("a;b;c"),
+ },
+ want: 0,
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := determineSeparator(tt.args.data)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if got != tt.want {
+ t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/backend/internal/core/services/reporting/reporting.go b/backend/internal/core/services/reporting/reporting.go
index 4ba408b..8fa5efc 100644
--- a/backend/internal/core/services/reporting/reporting.go
+++ b/backend/internal/core/services/reporting/reporting.go
@@ -1,13 +1,10 @@
package reporting
import (
- "context"
"encoding/csv"
"io"
- "time"
"github.com/gocarina/gocsv"
- "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/rs/zerolog"
)
@@ -29,57 +26,3 @@ func NewReportingService(repos *repo.AllRepos, l *zerolog.Logger) *ReportingServ
l: l,
}
}
-
-// =================================================================================================
-
-// NullableTime is a custom type that implements the MarshalCSV interface
-// to allow for nullable time.Time fields in the CSV output to be empty
-// and not "0001-01-01". It also overrides the default CSV output format
-type NullableTime time.Time
-
-func (t NullableTime) MarshalCSV() (string, error) {
- if time.Time(t).IsZero() {
- return "", nil
- }
- // YYYY-MM-DD
- return time.Time(t).Format("2006-01-02"), nil
-}
-
-type BillOfMaterialsEntry struct {
- PurchaseDate NullableTime `csv:"Purchase Date"`
- Name string `csv:"Name"`
- Description string `csv:"Description"`
- Manufacturer string `csv:"Manufacturer"`
- SerialNumber string `csv:"Serial Number"`
- ModelNumber string `csv:"Model Number"`
- Quantity int `csv:"Quantity"`
- Price float64 `csv:"Price"`
- TotalPrice float64 `csv:"Total Price"`
-}
-
-// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
-// See BillOfMaterialsEntry for the format of the output
-func (rs *ReportingService) BillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
- entities, err := rs.repos.Items.GetAll(ctx, GID)
- if err != nil {
- rs.l.Debug().Err(err).Msg("failed to get all items for BOM Csv Reporting")
- return nil, err
- }
-
- bomEntries := make([]BillOfMaterialsEntry, len(entities))
- for i, entity := range entities {
- bomEntries[i] = BillOfMaterialsEntry{
- PurchaseDate: NullableTime(entity.PurchaseTime),
- Name: entity.Name,
- Description: entity.Description,
- Manufacturer: entity.Manufacturer,
- SerialNumber: entity.SerialNumber,
- ModelNumber: entity.ModelNumber,
- Quantity: entity.Quantity,
- Price: entity.PurchasePrice,
- TotalPrice: entity.PurchasePrice * float64(entity.Quantity),
- }
- }
-
- return gocsv.MarshalBytes(&bomEntries)
-}
diff --git a/backend/internal/core/services/reporting/value_parsers.go b/backend/internal/core/services/reporting/value_parsers.go
new file mode 100644
index 0000000..7410396
--- /dev/null
+++ b/backend/internal/core/services/reporting/value_parsers.go
@@ -0,0 +1,38 @@
+package reporting
+
+import (
+ "strconv"
+ "strings"
+)
+
+func parseSeparatedString(s string, sep string) ([]string, error) {
+ list := strings.Split(s, sep)
+
+ csf := make([]string, 0, len(list))
+ for _, s := range list {
+ trimmed := strings.TrimSpace(s)
+ if trimmed != "" {
+ csf = append(csf, trimmed)
+ }
+ }
+
+ return csf, nil
+}
+
+func parseFloat(s string) float64 {
+ if s == "" {
+ return 0
+ }
+ f, _ := strconv.ParseFloat(s, 64)
+ return f
+}
+
+func parseBool(s string) bool {
+ b, _ := strconv.ParseBool(s)
+ return b
+}
+
+func parseInt(s string) int {
+ i, _ := strconv.Atoi(s)
+ return i
+}
diff --git a/backend/internal/core/services/service_items.go b/backend/internal/core/services/service_items.go
index df37e67..26b736d 100644
--- a/backend/internal/core/services/service_items.go
+++ b/backend/internal/core/services/service_items.go
@@ -3,10 +3,11 @@ package services
import (
"context"
"errors"
+ "io"
"github.com/google/uuid"
+ "github.com/hay-kot/homebox/backend/internal/core/services/reporting"
"github.com/hay-kot/homebox/backend/internal/data/repo"
- "github.com/rs/zerolog/log"
)
var (
@@ -37,7 +38,6 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) {
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID)
-
if err != nil {
return 0, err
}
@@ -61,190 +61,25 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
return finished, nil
}
-func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data [][]string) (int, error) {
- loaded := []csvRow{}
- // Skip first row
- for _, row := range data[1:] {
- // Skip empty rows
- if len(row) == 0 {
- continue
- }
+func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) {
+ // loaded, err := reporting.ReadCSV(data)
+ // if err != nil {
+ // return 0, err
+ // }
- if len(row) != NumOfCols {
- return 0, ErrInvalidCsv
- }
-
- r := newCsvRow(row)
- loaded = append(loaded, r)
- }
-
- // validate rows
- var errMap = map[int][]error{}
- var hasErr bool
- for i, r := range loaded {
-
- errs := r.validate()
-
- if len(errs) > 0 {
- hasErr = true
- lineNum := i + 2
-
- errMap[lineNum] = errs
- }
- }
-
- if hasErr {
- for lineNum, errs := range errMap {
- for _, err := range errs {
- log.Error().Err(err).Int("line", lineNum).Msg("csv import error")
- }
- }
- }
-
- // Bootstrap the locations and labels so we can reuse the created IDs for the items
- locations := map[string]uuid.UUID{}
- existingLocation, err := svc.repo.Locations.GetAll(ctx, GID, repo.LocationQuery{})
- if err != nil {
- return 0, err
- }
- for _, loc := range existingLocation {
- locations[loc.Name] = loc.ID
- }
-
- labels := map[string]uuid.UUID{}
- existingLabels, err := svc.repo.Labels.GetAll(ctx, GID)
- if err != nil {
- return 0, err
- }
- for _, label := range existingLabels {
- labels[label.Name] = label.ID
- }
-
- for _, row := range loaded {
-
- // Locations
- if _, exists := locations[row.Location]; !exists {
- result, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
- Name: row.Location,
- Description: "",
- })
- if err != nil {
- return 0, err
- }
- locations[row.Location] = result.ID
- }
-
- // Labels
-
- for _, label := range row.getLabels() {
- if _, exists := labels[label]; exists {
- continue
- }
- result, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{
- Name: label,
- Description: "",
- })
- if err != nil {
- return 0, err
- }
- labels[label] = result.ID
- }
- }
-
- highest := repo.AssetID(-1)
- if svc.autoIncrementAssetID {
- highest, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
- if err != nil {
- return 0, err
- }
- }
-
- // Create the items
- var count int
- for _, row := range loaded {
- // Check Import Ref
- if row.Item.ImportRef != "" {
- exists, err := svc.repo.Items.CheckRef(ctx, GID, row.Item.ImportRef)
- if exists {
- continue
- }
- if err != nil {
- log.Err(err).Msg("error checking import ref")
- }
- }
-
- locationID := locations[row.Location]
- labelIDs := []uuid.UUID{}
- for _, label := range row.getLabels() {
- labelIDs = append(labelIDs, labels[label])
- }
-
- log.Info().
- Str("name", row.Item.Name).
- Str("location", row.Location).
- Msgf("Creating Item: %s", row.Item.Name)
-
- data := repo.ItemCreate{
- ImportRef: row.Item.ImportRef,
- Name: row.Item.Name,
- Description: row.Item.Description,
- LabelIDs: labelIDs,
- LocationID: locationID,
- }
-
- if svc.autoIncrementAssetID {
- highest++
- data.AssetID = highest
- }
-
- result, err := svc.repo.Items.Create(ctx, GID, data)
-
- if err != nil {
- return count, err
- }
-
- // Update the item with the rest of the data
- _, err = svc.repo.Items.UpdateByGroup(ctx, GID, repo.ItemUpdate{
- // Edges
- LocationID: locationID,
- LabelIDs: labelIDs,
- AssetID: data.AssetID,
-
- // General Fields
- ID: result.ID,
- Name: result.Name,
- Description: result.Description,
- Insured: row.Item.Insured,
- Notes: row.Item.Notes,
- Quantity: row.Item.Quantity,
-
- // Identifies the item as imported
- SerialNumber: row.Item.SerialNumber,
- ModelNumber: row.Item.ModelNumber,
- Manufacturer: row.Item.Manufacturer,
-
- // Purchase
- PurchaseFrom: row.Item.PurchaseFrom,
- PurchasePrice: row.Item.PurchasePrice,
- PurchaseTime: row.Item.PurchaseTime,
-
- // Warranty
- LifetimeWarranty: row.Item.LifetimeWarranty,
- WarrantyExpires: row.Item.WarrantyExpires,
- WarrantyDetails: row.Item.WarrantyDetails,
-
- SoldTo: row.Item.SoldTo,
- SoldPrice: row.Item.SoldPrice,
- SoldTime: row.Item.SoldTime,
- SoldNotes: row.Item.SoldNotes,
- })
-
- if err != nil {
- return count, err
- }
-
- count++
- }
- return count, nil
+ return 0, nil
+}
+
+func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
+ items, err := svc.repo.Items.GetAll(ctx, GID)
+ if err != nil {
+ return nil, err
+ }
+
+ sheet := reporting.IOSheet{}
+
+ sheet.ReadItems(items)
+
+ return sheet.TSV()
}
diff --git a/backend/internal/core/services/service_items_csv.go b/backend/internal/core/services/service_items_csv.go
deleted file mode 100644
index 2d93f6e..0000000
--- a/backend/internal/core/services/service_items_csv.go
+++ /dev/null
@@ -1,151 +0,0 @@
-package services
-
-import (
- "bytes"
- "encoding/csv"
- "errors"
- "io"
- "strconv"
- "strings"
-
- "github.com/hay-kot/homebox/backend/internal/data/repo"
- "github.com/hay-kot/homebox/backend/internal/data/types"
-)
-
-func determineSeparator(data []byte) (rune, error) {
- // First row
- firstRow := bytes.Split(data, []byte("\n"))[0]
-
- // find first comma or /t
- comma := bytes.IndexByte(firstRow, ',')
- tab := bytes.IndexByte(firstRow, '\t')
-
- switch {
- case comma == -1 && tab == -1:
- return 0, errors.New("could not determine separator")
- case tab > comma:
- return '\t', nil
- default:
- return ',', nil
- }
-}
-
-func ReadCsv(r io.Reader) ([][]string, error) {
- data, err := io.ReadAll(r)
- if err != nil {
- return nil, err
- }
-
- reader := csv.NewReader(bytes.NewReader(data))
-
- // Determine separator
- sep, err := determineSeparator(data)
-
- if err != nil {
- return nil, err
- }
-
- reader.Comma = sep
-
- return reader.ReadAll()
-}
-
-var ErrInvalidCsv = errors.New("invalid csv")
-
-const NumOfCols = 21
-
-func parseFloat(s string) float64 {
- if s == "" {
- return 0
- }
- f, _ := strconv.ParseFloat(s, 64)
- return f
-}
-
-func parseBool(s string) bool {
- switch strings.ToLower(s) {
- case "true", "yes", "1":
- return true
- default:
- return false
- }
-}
-
-func parseInt(s string) int {
- i, _ := strconv.Atoi(s)
- return i
-}
-
-type csvRow struct {
- Item repo.ItemOut
- Location string
- LabelStr string
-}
-
-func newCsvRow(row []string) csvRow {
-
- return csvRow{
- Location: row[1],
- LabelStr: row[2],
- Item: repo.ItemOut{
- ItemSummary: repo.ItemSummary{
- ImportRef: row[0],
- Quantity: parseInt(row[3]),
- Name: row[4],
- Description: row[5],
- Insured: parseBool(row[6]),
- PurchasePrice: parseFloat(row[12]),
- },
- SerialNumber: row[7],
- ModelNumber: row[8],
- Manufacturer: row[9],
- Notes: row[10],
- PurchaseFrom: row[11],
- PurchaseTime: types.DateFromString(row[13]),
- LifetimeWarranty: parseBool(row[14]),
- WarrantyExpires: types.DateFromString(row[15]),
- WarrantyDetails: row[16],
- SoldTo: row[17],
- SoldPrice: parseFloat(row[18]),
- SoldTime: types.DateFromString(row[19]),
- SoldNotes: row[20],
- },
- }
-}
-
-func (c csvRow) getLabels() []string {
- split := strings.Split(c.LabelStr, ";")
-
- // Trim each
- for i, s := range split {
- split[i] = strings.TrimSpace(s)
- }
-
- // Remove empty
- for i, s := range split {
- if s == "" {
- split = append(split[:i], split[i+1:]...)
- }
- }
-
- return split
-}
-
-func (c csvRow) validate() []error {
- var errs []error
-
- add := func(err error) {
- errs = append(errs, err)
- }
-
- required := func(s string, name string) {
- if s == "" {
- add(errors.New(name + " is required"))
- }
- }
-
- required(c.Location, "Location")
- required(c.Item.Name, "Name")
-
- return errs
-}
diff --git a/backend/internal/core/services/service_items_csv_test.go b/backend/internal/core/services/service_items_csv_test.go
deleted file mode 100644
index af3056c..0000000
--- a/backend/internal/core/services/service_items_csv_test.go
+++ /dev/null
@@ -1,164 +0,0 @@
-package services
-
-import (
- "bytes"
- _ "embed"
- "encoding/csv"
- "fmt"
- "reflect"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
-)
-
-//go:embed .testdata/import.csv
-var CSVData_Comma []byte
-
-//go:embed .testdata/import.tsv
-var CSVData_Tab []byte
-
-func loadcsv() [][]string {
- reader := csv.NewReader(bytes.NewReader(CSVData_Comma))
-
- records, err := reader.ReadAll()
- if err != nil {
- panic(err)
- }
-
- return records
-}
-
-func Test_CorrectDateParsing(t *testing.T) {
- t.Parallel()
-
- expected := []time.Time{
- time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
- time.Date(2021, 10, 15, 0, 0, 0, 0, time.UTC),
- time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
- time.Date(2020, 10, 21, 0, 0, 0, 0, time.UTC),
- time.Date(2020, 10, 14, 0, 0, 0, 0, time.UTC),
- time.Date(2020, 9, 30, 0, 0, 0, 0, time.UTC),
- }
-
- records := loadcsv()
-
- for i, record := range records {
- if i == 0 {
- continue
- }
- entity := newCsvRow(record)
- expected := expected[i-1]
-
- assert.Equal(t, expected, entity.Item.PurchaseTime.Time(), fmt.Sprintf("Failed on row %d", i))
- assert.Equal(t, expected, entity.Item.WarrantyExpires.Time(), fmt.Sprintf("Failed on row %d", i))
- assert.Equal(t, expected, entity.Item.SoldTime.Time(), fmt.Sprintf("Failed on row %d", i))
- }
-}
-
-func Test_csvRow_getLabels(t *testing.T) {
- type fields struct {
- LabelStr string
- }
- tests := []struct {
- name string
- fields fields
- want []string
- }{
- {
- name: "basic test",
- fields: fields{
- LabelStr: "IOT;Home Assistant;Z-Wave",
- },
- want: []string{"IOT", "Home Assistant", "Z-Wave"},
- },
- {
- name: "no labels",
- fields: fields{
- LabelStr: "",
- },
- want: []string{},
- },
- {
- name: "single label",
- fields: fields{
- LabelStr: "IOT",
- },
- want: []string{"IOT"},
- },
- {
- name: "trailing semicolon",
- fields: fields{
- LabelStr: "IOT;",
- },
- want: []string{"IOT"},
- },
-
- {
- name: "whitespace",
- fields: fields{
- LabelStr: " IOT; Home Assistant; Z-Wave ",
- },
- want: []string{"IOT", "Home Assistant", "Z-Wave"},
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- c := csvRow{
- LabelStr: tt.fields.LabelStr,
- }
- if got := c.getLabels(); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("csvRow.getLabels() = %v, want %v", got, tt.want)
- }
- })
- }
-}
-
-func Test_determineSeparator(t *testing.T) {
- type args struct {
- data []byte
- }
- tests := []struct {
- name string
- args args
- want rune
- wantErr bool
- }{
- {
- name: "comma",
- args: args{
- data: CSVData_Comma,
- },
- want: ',',
- wantErr: false,
- },
- {
- name: "tab",
- args: args{
- data: CSVData_Tab,
- },
- want: '\t',
- wantErr: false,
- },
- {
- name: "invalid",
- args: args{
- data: []byte("a;b;c"),
- },
- want: 0,
- wantErr: true,
- },
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got, err := determineSeparator(tt.args.data)
- if (err != nil) != tt.wantErr {
- t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if got != tt.want {
- t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
- }
- })
- }
-}
diff --git a/backend/internal/core/services/service_items_test.go b/backend/internal/core/services/service_items_test.go
deleted file mode 100644
index 105c842..0000000
--- a/backend/internal/core/services/service_items_test.go
+++ /dev/null
@@ -1,78 +0,0 @@
-package services
-
-import (
- "context"
- "testing"
-
- "github.com/google/uuid"
- "github.com/hay-kot/homebox/backend/internal/data/repo"
- "github.com/stretchr/testify/assert"
-)
-
-func TestItemService_CsvImport(t *testing.T) {
- data := loadcsv()
- svc := &ItemService{
- repo: tRepos,
- }
- count, err := svc.CsvImport(context.Background(), tGroup.ID, data)
- assert.Equal(t, 6, count)
- assert.NoError(t, err)
-
- // Check import refs are deduplicated
- count, err = svc.CsvImport(context.Background(), tGroup.ID, data)
- assert.Equal(t, 0, count)
- assert.NoError(t, err)
-
- items, err := svc.repo.Items.GetAll(context.Background(), tGroup.ID)
- assert.NoError(t, err)
- t.Cleanup(func() {
- for _, item := range items {
- err := svc.repo.Items.Delete(context.Background(), item.ID)
- assert.NoError(t, err)
- }
- })
-
- assert.Equal(t, len(items), 6)
-
- dataCsv := []csvRow{}
- for _, item := range data {
- dataCsv = append(dataCsv, newCsvRow(item))
- }
-
- allLocation, err := tRepos.Locations.GetAll(context.Background(), tGroup.ID, repo.LocationQuery{})
- assert.NoError(t, err)
- locNames := []string{}
- for _, loc := range allLocation {
- locNames = append(locNames, loc.Name)
- }
-
- allLabels, err := tRepos.Labels.GetAll(context.Background(), tGroup.ID)
- assert.NoError(t, err)
- labelNames := []string{}
- for _, label := range allLabels {
- labelNames = append(labelNames, label.Name)
- }
-
- ids := []uuid.UUID{}
- t.Cleanup((func() {
- for _, id := range ids {
- err := svc.repo.Items.Delete(context.Background(), id)
- assert.NoError(t, err)
- }
- }))
-
- for _, item := range items {
- assert.Contains(t, locNames, item.Location.Name)
- for _, label := range item.Labels {
- assert.Contains(t, labelNames, label.Name)
- }
-
- for _, csvRow := range dataCsv {
- if csvRow.Item.Name == item.Name {
- assert.Equal(t, csvRow.Item.Description, item.Description)
- assert.Equal(t, csvRow.Item.Quantity, item.Quantity)
- assert.Equal(t, csvRow.Item.Insured, item.Insured)
- }
- }
- }
-}
diff --git a/backend/internal/data/repo/asset_id_type.go b/backend/internal/data/repo/asset_id_type.go
index 06d610e..678a510 100644
--- a/backend/internal/data/repo/asset_id_type.go
+++ b/backend/internal/data/repo/asset_id_type.go
@@ -32,10 +32,18 @@ func ParseAssetID(s string) (AID AssetID, ok bool) {
return ParseAssetIDBytes([]byte(s))
}
-func (aid AssetID) MarshalJSON() ([]byte, error) {
+func (aid AssetID) String() string {
+ if aid.Nil() {
+ return ""
+ }
+
aidStr := fmt.Sprintf("%06d", aid)
aidStr = fmt.Sprintf("%s-%s", aidStr[:3], aidStr[3:])
- return []byte(fmt.Sprintf(`"%s"`, aidStr)), nil
+ return aidStr
+}
+
+func (aid AssetID) MarshalJSON() ([]byte, error) {
+ return []byte(`"` + aid.String() + `"`), nil
}
func (aid *AssetID) UnmarshalJSON(d []byte) error {
@@ -50,3 +58,11 @@ func (aid *AssetID) UnmarshalJSON(d []byte) error {
*aid = AssetID(aidInt)
return nil
}
+
+func (aid AssetID) MarshalCSV() (string, error) {
+ return aid.String(), nil
+}
+
+func (aid *AssetID) UnmarshalCSV(d string) error {
+ return aid.UnmarshalJSON([]byte(d))
+}
diff --git a/backend/internal/data/repo/asset_id_type_test.go b/backend/internal/data/repo/asset_id_type_test.go
index 6a692d9..6aa7b99 100644
--- a/backend/internal/data/repo/asset_id_type_test.go
+++ b/backend/internal/data/repo/asset_id_type_test.go
@@ -21,7 +21,7 @@ func TestAssetID_MarshalJSON(t *testing.T) {
{
name: "zero test",
aid: 0,
- want: []byte(`"000-000"`),
+ want: []byte(`""`),
},
{
name: "large int",
diff --git a/frontend/pages/tools.vue b/frontend/pages/tools.vue
index 8606e9b..40f3960 100644
--- a/frontend/pages/tools.vue
+++ b/frontend/pages/tools.vue
@@ -45,10 +45,10 @@
Imports the standard CSV format for Homebox. This will not overwrite any existing items in your
inventory. It will only add new items.
-
+
@@ -103,7 +103,13 @@
const notify = useNotifier();
function getBillOfMaterials() {
- api.reports.billOfMaterialsURL();
+ const url = api.reports.billOfMaterialsURL();
+ window.open(url, "_blank");
+ }
+
+ function getExportTSV() {
+ const url = api.items.exportURL();
+ window.open(url, "_blank");
}
async function ensureAssetIDs() {