package storage

import (
	"bytes"
	"compress/gzip"
	"io"
	"io/ioutil"
	"os"
	"testing"
)
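
// TestDuplicateFail ensures the packer rejects entries whose paths collide
// after cleaning, returning ErrDuplicatePath for each duplicate.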
func TestDuplicateFail(t *testing.T) {
	e := []Entry{
		Entry{
			Type:    FileType,
			Name:    "./hurr.txt",
			Payload: []byte("abcde"),
		},
		Entry{
			Type:    FileType,
			Name:    "./hurr.txt",
			Payload: []byte("deadbeef"),
		},
		Entry{
			Type:    FileType,
			Name:    "hurr.txt", // slightly different path, same file though
			Payload: []byte("deadbeef"),
		},
	}

	buf := []byte{}
	b := bytes.NewBuffer(buf)

	jp := NewJSONPacker(b)
	if _, err := jp.AddEntry(e[0]); err != nil {
		t.Error(err)
	}
	if _, err := jp.AddEntry(e[1]); err != ErrDuplicatePath {
		t.Errorf("expected failure on duplicate path")
	}
	if _, err := jp.AddEntry(e[2]); err != ErrDuplicatePath {
		t.Errorf("expected failure on duplicate path")
	}
}
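
// TestJSONPackerUnpacker round-trips a mix of segment and file entries
// through the JSON packer and unpacker and checks that every entry comes
// back out.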
func TestJSONPackerUnpacker(t *testing.T) {
	e := []Entry{
		Entry{
			Type:    SegmentType,
			Payload: []byte("how"),
		},
		Entry{
			Type:    SegmentType,
			Payload: []byte("y'all"),
		},
		Entry{
			Type:    FileType,
			Name:    "./hurr.txt",
			Payload: []byte("deadbeef"),
		},
		Entry{
			Type:    SegmentType,
			Payload: []byte("doin"),
		},
	}

	buf := []byte{}
	b := bytes.NewBuffer(buf)

	func() {
		jp := NewJSONPacker(b)
		for i := range e {
			if _, err := jp.AddEntry(e[i]); err != nil {
				t.Error(err)
			}
		}
	}()

	// >> packer_test.go:43: uncompressed: 266
	//t.Errorf("uncompressed: %d", len(b.Bytes()))

	b = bytes.NewBuffer(b.Bytes())
	entries := Entries{}
	func() {
		jup := NewJSONUnpacker(b)
		for {
			entry, err := jup.Next()
			if err != nil {
				if err == io.EOF {
					break
				}
				t.Error(err)
			}
			entries = append(entries, *entry)
			t.Logf("got %#v", entry)
		}
	}()
	if len(entries) != len(e) {
		t.Errorf("expected %d entries, got %d", len(e), len(entries))
	}
}

// You can use a compress Reader/Writer around the packer's stream and make
// nice savings.
//
// For these two tests, which use the same set of entries, it is the
// difference of 266 bytes uncompressed vs 138 bytes compressed.
func TestGzip(t *testing.T) {
	e := []Entry{
		Entry{
			Type:    SegmentType,
			Payload: []byte("how"),
		},
		Entry{
			Type:    SegmentType,
			Payload: []byte("y'all"),
		},
		Entry{
			Type:    FileType,
			Name:    "./hurr.txt",
			Payload: []byte("deadbeef"),
		},
		Entry{
			Type:    SegmentType,
			Payload: []byte("doin"),
		},
	}

	buf := []byte{}
	b := bytes.NewBuffer(buf)
	gzW := gzip.NewWriter(b)
	jp := NewJSONPacker(gzW)
	for i := range e {
		if _, err := jp.AddEntry(e[i]); err != nil {
			t.Error(err)
		}
	}
	gzW.Close()

	// >> packer_test.go:99: compressed: 138
	//t.Errorf("compressed: %d", len(b.Bytes()))

	b = bytes.NewBuffer(b.Bytes())
	gzR, err := gzip.NewReader(b)
	if err != nil {
		t.Fatal(err)
	}
	entries := Entries{}
	func() {
		jup := NewJSONUnpacker(gzR)
		for {
			entry, err := jup.Next()
			if err != nil {
				if err == io.EOF {
					break
				}
				t.Error(err)
			}
			entries = append(entries, *entry)
			t.Logf("got %#v", entry)
		}
	}()
	if len(entries) != len(e) {
		t.Errorf("expected %d entries, got %d", len(e), len(entries))
	}
}
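
// BenchmarkGetPut measures packing the sample entries to a temp file and
// unpacking them again.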
func BenchmarkGetPut(b *testing.B) {
	e := []Entry{
		Entry{
			Type:    SegmentType,
			Payload: []byte("how"),
		},
		Entry{
			Type:    SegmentType,
			Payload: []byte("y'all"),
		},
		Entry{
			Type:    FileType,
			Name:    "./hurr.txt",
			Payload: []byte("deadbeef"),
		},
		Entry{
			Type:    SegmentType,
			Payload: []byte("doin"),
		},
	}
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			func() {
				fh, err := ioutil.TempFile("", "tar-split.")
				if err != nil {
					b.Fatal(err)
				}
				defer os.Remove(fh.Name())
				defer fh.Close()

				jp := NewJSONPacker(fh)
				for i := range e {
					if _, err := jp.AddEntry(e[i]); err != nil {
						b.Fatal(err)
					}
				}
				if err := fh.Sync(); err != nil {
					b.Fatal(err)
				}

				// rewind to the start of the file so the unpacker reads back
				// the entries that were just written
				if _, err := fh.Seek(0, io.SeekStart); err != nil {
					b.Fatal(err)
				}

				up := NewJSONUnpacker(fh)
				for {
					_, err := up.Next()
					if err != nil {
						if err == io.EOF {
							break
						}
						b.Fatal(err)
					}
				}
			}()
		}
	})
}