diff --git a/tar/asm/assemble_test.go b/tar/asm/assemble_test.go
index e37d7f3..da515f2 100644
--- a/tar/asm/assemble_test.go
+++ b/tar/asm/assemble_test.go
@@ -113,137 +113,76 @@ func TestTarStreamMangledGetterPutter(t *testing.T) {
 }
 
 func TestTarStream(t *testing.T) {
-	var (
-		expectedSHA1Sum       = "1eb237ff69bca6e22789ecb05b45d35ca307adbd"
-		expectedSize    int64 = 10240
-	)
-
-	fh, err := os.Open("./testdata/t.tar.gz")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer fh.Close()
-	gzRdr, err := gzip.NewReader(fh)
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer gzRdr.Close()
-
-	// Setup where we'll store the metadata
-	w := bytes.NewBuffer([]byte{})
-	sp := storage.NewJSONPacker(w)
-	fgp := storage.NewBufferFileGetPutter()
-
-	// wrap the disassembly stream
-	tarStream, err := NewInputTarStream(gzRdr, sp, fgp)
-	if err != nil {
-		t.Fatal(err)
+	testCases := []struct {
+		path            string
+		expectedSHA1Sum string
+		expectedSize    int64
+	}{
+		{"./testdata/t.tar.gz", "1eb237ff69bca6e22789ecb05b45d35ca307adbd", 10240},
+		{"./testdata/longlink.tar.gz", "d9f6babe107b7247953dff6b5b5ae31a3a880add", 20480},
+		{"./testdata/fatlonglink.tar.gz", "8537f03f89aeef537382f8b0bb065d93e03b0be8", 26234880},
 	}
 
-	// get a sum of the stream after it has passed through to ensure it's the same.
-	h0 := sha1.New()
-	tRdr0 := io.TeeReader(tarStream, h0)
+	for _, tc := range testCases {
+		fh, err := os.Open(tc.path)
+		if err != nil {
+			t.Fatal(err)
+		}
+		defer fh.Close()
+		gzRdr, err := gzip.NewReader(fh)
+		if err != nil {
+			t.Fatal(err)
+		}
+		defer gzRdr.Close()
 
-	// read it all to the bit bucket
-	i, err := io.Copy(ioutil.Discard, tRdr0)
-	if err != nil {
-		t.Fatal(err)
-	}
+		// Setup where we'll store the metadata
+		w := bytes.NewBuffer([]byte{})
+		sp := storage.NewJSONPacker(w)
+		fgp := storage.NewBufferFileGetPutter()
 
-	if i != expectedSize {
-		t.Errorf("size of tar: expected %d; got %d", expectedSize, i)
-	}
-	if fmt.Sprintf("%x", h0.Sum(nil)) != expectedSHA1Sum {
-		t.Fatalf("checksum of tar: expected %s; got %x", expectedSHA1Sum, h0.Sum(nil))
-	}
+		// wrap the disassembly stream
+		tarStream, err := NewInputTarStream(gzRdr, sp, fgp)
+		if err != nil {
+			t.Fatal(err)
+		}
 
-	t.Logf("%s", w.String()) // if we fail, then show the packed info
+		// get a sum of the stream after it has passed through to ensure it's the same.
+		h0 := sha1.New()
+		tRdr0 := io.TeeReader(tarStream, h0)
 
-	// If we've made it this far, then we'll turn it around and create a tar
-	// stream from the packed metadata and buffered file contents.
-	r := bytes.NewBuffer(w.Bytes())
-	sup := storage.NewJSONUnpacker(r)
-	// and reuse the fgp that we Put the payloads to.
+		// read it all to the bit bucket
+		i, err := io.Copy(ioutil.Discard, tRdr0)
+		if err != nil {
+			t.Fatal(err)
+		}
 
-	rc := NewOutputTarStream(fgp, sup)
-	h1 := sha1.New()
-	i, err = io.Copy(h1, rc)
-	if err != nil {
-		t.Fatal(err)
-	}
+		if i != tc.expectedSize {
+			t.Errorf("size of tar: expected %d; got %d", tc.expectedSize, i)
+		}
+		if fmt.Sprintf("%x", h0.Sum(nil)) != tc.expectedSHA1Sum {
+			t.Fatalf("checksum of tar: expected %s; got %x", tc.expectedSHA1Sum, h0.Sum(nil))
+		}
 
-	if i != expectedSize {
-		t.Errorf("size of output tar: expected %d; got %d", expectedSize, i)
-	}
-	if fmt.Sprintf("%x", h1.Sum(nil)) != expectedSHA1Sum {
-		t.Fatalf("checksum of output tar: expected %s; got %x", expectedSHA1Sum, h1.Sum(nil))
-	}
-}
-
-func TestTarGNUTar(t *testing.T) {
-	var (
-		expectedSHA1Sum       = "d9f6babe107b7247953dff6b5b5ae31a3a880add"
-		expectedSize    int64 = 20480
-	)
-
-	fh, err := os.Open("./testdata/longlink.tar.gz")
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer fh.Close()
-	gzRdr, err := gzip.NewReader(fh)
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer gzRdr.Close()
-
-	// Setup where we'll store the metadata
-	w := bytes.NewBuffer([]byte{})
-	sp := storage.NewJSONPacker(w)
-	fgp := storage.NewBufferFileGetPutter()
-
-	// wrap the disassembly stream
-	tarStream, err := NewInputTarStream(gzRdr, sp, fgp)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	// get a sum of the stream after it has passed through to ensure it's the same.
-	h0 := sha1.New()
-	tRdr0 := io.TeeReader(tarStream, h0)
-
-	// read it all to the bit bucket
-	i, err := io.Copy(ioutil.Discard, tRdr0)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	if i != expectedSize {
-		t.Errorf("size of tar: expected %d; got %d", expectedSize, i)
-	}
-	if fmt.Sprintf("%x", h0.Sum(nil)) != expectedSHA1Sum {
-		t.Fatalf("checksum of tar: expected %s; got %x", expectedSHA1Sum, h0.Sum(nil))
-	}
-
-	t.Logf("%s", w.String()) // if we fail, then show the packed info
-
-	// If we've made it this far, then we'll turn it around and create a tar
-	// stream from the packed metadata and buffered file contents.
-	r := bytes.NewBuffer(w.Bytes())
-	sup := storage.NewJSONUnpacker(r)
-	// and reuse the fgp that we Put the payloads to.
-
-	rc := NewOutputTarStream(fgp, sup)
-	h1 := sha1.New()
-	i, err = io.Copy(h1, rc)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	if i != expectedSize {
-		t.Errorf("size of output tar: expected %d; got %d", expectedSize, i)
-	}
-	if fmt.Sprintf("%x", h1.Sum(nil)) != expectedSHA1Sum {
-		t.Fatalf("checksum of output tar: expected %s; got %x", expectedSHA1Sum, h1.Sum(nil))
+		t.Logf("%s", w.String()) // if we fail, then show the packed info
+
+		// If we've made it this far, then we'll turn it around and create a tar
+		// stream from the packed metadata and buffered file contents.
+		r := bytes.NewBuffer(w.Bytes())
+		sup := storage.NewJSONUnpacker(r)
+		// and reuse the fgp that we Put the payloads to.
+
+		rc := NewOutputTarStream(fgp, sup)
+		h1 := sha1.New()
+		i, err = io.Copy(h1, rc)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		if i != tc.expectedSize {
+			t.Errorf("size of output tar: expected %d; got %d", tc.expectedSize, i)
+		}
+		if fmt.Sprintf("%x", h1.Sum(nil)) != tc.expectedSHA1Sum {
+			t.Fatalf("checksum of output tar: expected %s; got %x", tc.expectedSHA1Sum, h1.Sum(nil))
+		}
 	}
 }
diff --git a/tar/asm/testdata/fatlonglink.tar.gz b/tar/asm/testdata/fatlonglink.tar.gz
new file mode 100644
index 0000000..0d8ed14
Binary files /dev/null and b/tar/asm/testdata/fatlonglink.tar.gz differ