
tar/asm: now testing assemble and disassemble

passing a tar archive through disassembly, then reassembling a tar
stream from its metadata. Checking size and sha1 of the whole stream.
Vincent Batts 2015-03-05 14:09:17 -05:00
parent feaa049730
commit ab2fc5ec40
3 changed files with 45 additions and 99 deletions
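
In code, the round trip this commit starts testing looks roughly like this: disassemble a tar stream into packed JSON metadata plus file payloads, then rebuild the stream from that metadata and compare size and sha1 against the original. Below is a minimal sketch using the functions touched in the diff; the import paths under github.com/vbatts/tar-split and the "archive.tar" fixture name are assumptions for illustration, and error handling is elided.

```go
package main

import (
	"bytes"
	"crypto/sha1"
	"fmt"
	"io"
	"io/ioutil"
	"os"

	"github.com/vbatts/tar-split/tar/asm"     // assumed import path
	"github.com/vbatts/tar-split/tar/storage" // assumed import path
)

func main() {
	someTar, _ := os.Open("archive.tar") // stand-in for the gzip-decompressed test fixture
	defer someTar.Close()

	// Disassemble: packed metadata goes into w, file payloads into fgp.
	w := bytes.NewBuffer(nil)
	sp := storage.NewJsonPacker(w)
	fgp := asm.NewBufferFileGetPutter()
	rdr, _ := asm.NewInputTarStream(someTar, sp, fgp)

	// The disassembly stream is still byte-for-byte the original tar.
	h0 := sha1.New()
	n, _ := io.Copy(ioutil.Discard, io.TeeReader(rdr, h0))

	// Reassemble from the packed metadata plus the buffered payloads.
	sup := storage.NewJsonUnpacker(bytes.NewBuffer(w.Bytes()))
	h1 := sha1.New()
	m, _ := io.Copy(ioutil.Discard, io.TeeReader(asm.NewOutputTarStream(fgp, sup), h1))

	fmt.Printf("in:  %d bytes, sha1 %x\nout: %d bytes, sha1 %x\n", n, h0.Sum(nil), m, h1.Sum(nil))
}
```

The property under test is that the reassembled stream is byte-for-byte identical to the input, so both sizes and digests should match.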


@@ -60,6 +60,7 @@ func NewOutputTarStream(fg FileGetter, up storage.Unpacker) io.ReadCloser {
 				}
 			}
 		}
+		pw.Close()
 	}()
 	return pr
 }
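
For context on the single added line: NewOutputTarStream hands back the read half of an io.Pipe, and a pipe reader only sees io.EOF once the write half is closed. Without the pw.Close(), anything draining the assembled stream to the end, like the round-trip test below, blocks forever after the last entry. A stripped-down illustration of the pattern follows; the stream helper is hypothetical, not from the package.

```go
package main

import (
	"fmt"
	"io"
	"io/ioutil"
)

// stream mirrors the shape of NewOutputTarStream: it hands back the read
// half of a pipe while a goroutine writes the assembled bytes into it.
func stream() io.ReadCloser {
	pr, pw := io.Pipe()
	go func() {
		pw.Write([]byte("tar bytes would go here"))
		pw.Close() // without this, the io.Copy below never sees EOF and hangs
	}()
	return pr
}

func main() {
	n, err := io.Copy(ioutil.Discard, stream())
	fmt.Println(n, err) // 23 <nil>
}
```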


@@ -17,12 +17,6 @@ var entries = []struct {
 	Entry storage.Entry
 	Body  []byte
 }{
-	{
-		Entry: storage.Entry{
-			Type: storage.SegmentType,
-			Payload: []byte("y'all"), // FIXME need real header here
-		},
-	},
 	{
 		Entry: storage.Entry{
 			Type: storage.FileType,
@@ -32,12 +26,6 @@ var entries = []struct {
 		},
 		Body: []byte("imma hurr til I derp"),
 	},
-	{
-		Entry: storage.Entry{
-			Type: storage.SegmentType,
-			Payload: []byte("doin"), // FIXME need real header here
-		},
-	},
 	{
 		Entry: storage.Entry{
 			Type: storage.FileType,
@@ -48,72 +36,9 @@ var entries = []struct {
 		Body: []byte("café con leche, por favor"),
 	},
-	{
-		Entry: storage.Entry{
-			Type: storage.SegmentType,
-			Payload: []byte{00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
-				00, 00},
-		},
-	},
 }
 
-func TestTarStream(t *testing.T) {
+func TestTarStreamOld(t *testing.T) {
 	fgp := NewBufferFileGetPutter()
 	// first lets prep a GetPutter and Packer
@@ -143,7 +68,7 @@ func TestTarStream(t *testing.T) {
 	// TODO finish this
 }
 
-func TestInputTarStream(t *testing.T) {
+func TestTarStream(t *testing.T) {
 	var (
 		expectedSum        = "1eb237ff69bca6e22789ecb05b45d35ca307adbd"
 		expectedSize int64 = 10240
@@ -165,22 +90,18 @@ func TestInputTarStream(t *testing.T) {
 	sp := storage.NewJsonPacker(w)
 	fgp := NewBufferFileGetPutter()
 
-	// check the tar on the front end too
-	h0 := sha1.New()
-	tRdr0 := io.TeeReader(gzRdr, h0)
-
 	// wrap the disassembly stream
-	tarStream, err := NewInputTarStream(tRdr0, sp, fgp)
+	tarStream, err := NewInputTarStream(gzRdr, sp, fgp)
 	if err != nil {
 		t.Fatal(err)
 	}
 
 	// get a sum of the stream after it has passed through to ensure it's the same.
-	h1 := sha1.New()
-	tRdr1 := io.TeeReader(tarStream, h1)
+	h0 := sha1.New()
+	tRdr0 := io.TeeReader(tarStream, h0)
 
 	// read it all to the bit bucket
-	i, err := io.Copy(ioutil.Discard, tRdr1)
+	i, err := io.Copy(ioutil.Discard, tRdr0)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -188,10 +109,32 @@ func TestInputTarStream(t *testing.T) {
 	if i != expectedSize {
 		t.Errorf("size of tar: expected %d; got %d", expectedSize, i)
 	}
+	if fmt.Sprintf("%x", h0.Sum(nil)) != expectedSum {
+		t.Fatalf("checksum of tar: expected %s; got %x", expectedSum, h0.Sum(nil))
+	}
+	t.Logf("%s", w.String()) // if we fail, then show the packed info
+
+	// If we've made it this far, then we'll turn it around and create a tar
+	// stream from the packed metadata and buffered file contents.
+	r := bytes.NewBuffer(w.Bytes())
+	sup := storage.NewJsonUnpacker(r)
+	// and reuse the fgp that we Put the payloads to.
+	rc := NewOutputTarStream(fgp, sup)
+	h1 := sha1.New()
+	tRdr1 := io.TeeReader(rc, h1)
+
+	// read it all to the bit bucket
+	i, err = io.Copy(ioutil.Discard, tRdr1)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if i != expectedSize {
+		t.Errorf("size of output tar: expected %d; got %d", expectedSize, i)
+	}
 	if fmt.Sprintf("%x", h1.Sum(nil)) != expectedSum {
-		t.Logf("h0 was %x", h0.Sum(nil))
-		t.Errorf("checksum of tar: expected %q; got %x", expectedSum, h1.Sum(nil))
+		t.Fatalf("checksum of output tar: expected %s; got %x", expectedSum, h1.Sum(nil))
 	}
 }
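
Both directions of the test above use the same idiom: tee the stream through a sha1 hash while copying it to the bit bucket, so a single pass yields both the byte count and the digest. A small sketch of that idiom as a standalone helper; the sumAndSize name is made up for illustration, not part of the package.

```go
package main

import (
	"crypto/sha1"
	"fmt"
	"io"
	"io/ioutil"
	"strings"
)

// sumAndSize drains r, returning how many bytes it held and their sha1 digest.
func sumAndSize(r io.Reader) (int64, string, error) {
	h := sha1.New()
	n, err := io.Copy(ioutil.Discard, io.TeeReader(r, h))
	return n, fmt.Sprintf("%x", h.Sum(nil)), err
}

func main() {
	n, sum, _ := sumAndSize(strings.NewReader("hello"))
	fmt.Println(n, sum) // 5 aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d
}
```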


@@ -70,16 +70,16 @@ func NewInputTarStream(r io.Reader, p storage.Packer, fp FilePutter) (io.Reader,
 				pW.CloseWithError(err)
 			}
 
-			var csum []byte
+			sumChan := make(chan []byte)
 			if hdr.Size > 0 {
 				// if there is a file payload to write, then write the file to the FilePutter
 				fileRdr, fileWrtr := io.Pipe()
 				go func() {
-					var err error
-					_, csum, err = fp.Put(hdr.Name, fileRdr)
+					_, csum, err := fp.Put(hdr.Name, fileRdr)
 					if err != nil {
 						pW.CloseWithError(err)
 					}
+					sumChan <- csum
 				}()
 				if _, err = io.Copy(fileWrtr, tr); err != nil {
 					pW.CloseWithError(err)
@@ -92,20 +92,22 @@ func NewInputTarStream(r io.Reader, p storage.Packer, fp FilePutter) (io.Reader,
 				Type: storage.FileType,
 				Name: hdr.Name,
 				Size: hdr.Size,
-				Payload: csum,
+				Payload: <-sumChan,
 			})
 			if err != nil {
 				pW.CloseWithError(err)
 			}
 
-			_, err = p.AddEntry(storage.Entry{
-				Type: storage.SegmentType,
-				Payload: tr.RawBytes(),
-			})
-			if err != nil {
-				pW.CloseWithError(err)
-			}
+			if b := tr.RawBytes(); len(b) > 0 {
+				_, err = p.AddEntry(storage.Entry{
+					Type: storage.SegmentType,
+					Payload: b,
+				})
+				if err != nil {
+					pW.CloseWithError(err)
+				}
+			}
 
 			// it is allowable, and not uncommon that there is further padding on the
 			// end of an archive, apart from the expected 1024 null bytes.
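
The change in this last file is about synchronization: the checksum is produced by fp.Put inside a goroutine, but the previous code read the shared csum variable from the outer loop with nothing guaranteeing the goroutine had finished, a data race. Sending the sum over sumChan and receiving it where the FileType entry is packed makes the hand-off explicit and blocking. A stripped-down sketch of that pattern follows, with sha1 standing in for whatever digest the FilePutter actually computes and all names illustrative.

```go
package main

import (
	"crypto/sha1"
	"fmt"
	"io"
	"strings"
)

func main() {
	payload := strings.NewReader("imma hurr til I derp")
	sumChan := make(chan []byte)
	fileRdr, fileWrtr := io.Pipe()

	// Consumer goroutine: drains the pipe into a hash, then hands the
	// finished sum back over the channel (the role fp.Put plays).
	go func() {
		h := sha1.New()
		io.Copy(h, fileRdr)
		sumChan <- h.Sum(nil)
	}()

	// Producer: stream the file payload into the pipe, as the tar reader
	// loop does with io.Copy(fileWrtr, tr).
	io.Copy(fileWrtr, payload)
	fileWrtr.Close()

	// Receiving blocks until the goroutine is done, so the sum is complete
	// before it would be packed into a storage.Entry.
	fmt.Printf("payload sha1: %x\n", <-sumChan)
}
```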