forked from mirrors/tar-split
WIP
parent e2a62d6b0d
commit 5c8d5cacba
13 changed files with 265 additions and 50 deletions

@@ -9,36 +9,41 @@ func (e Entries) Len() int { return len(e) }
 func (e Entries) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
 func (e Entries) Less(i, j int) bool { return e[i].Position < e[j].Position }
 
-// Type of Entry
-type Type int
+// EntryType is the type of Entry
+type EntryType int
 
 const (
-	// FileType represents a file payload from the tar stream.
+	// FileCheckEntry represents a file payload from the tar stream.
 	//
 	// This will be used to map to relative paths on disk. Only Size > 0 will get
 	// read into a resulting output stream (due to hardlinks).
-	FileType Type = 1 + iota
-	// SegmentType represents a raw bytes segment from the archive stream. These raw
+	FileCheckEntry EntryType = 1 + iota
+
+	// SegmentEntry represents a raw bytes segment from the archive stream. These raw
 	// byte segments consist of the raw headers and various padding.
 	//
 	// Its payload is to be marshalled base64 encoded.
-	SegmentType
+	SegmentEntry
+
+	// VerficationEntry is a structure of keywords for validating the on-disk
+	// file attributes against the attributes of the Tar archive file headers
+	VerficationEntry
 )
 
 // Entry is the structure for packing and unpacking the information read from
 // the Tar archive.
 //
-// FileType Payload checksum is using `hash/crc64` for basic file integrity,
+// FileCheckEntry Payload checksum is using `hash/crc64` for basic file integrity,
 // _not_ for cryptography.
 // From http://www.backplane.com/matt/crc64.html, CRC32 has almost 40,000
 // collisions in a sample of 18.2 million, CRC64 had none.
 type Entry struct {
-	Type     Type   `json:"type"`
-	Name     string `json:"name,omitempty"`
-	NameRaw  []byte `json:"name_raw,omitempty"`
-	Size     int64  `json:"size,omitempty"`
-	Payload  []byte `json:"payload"` // SegmentType stores payload here; FileType stores crc64 checksum here;
-	Position int    `json:"position"`
+	Type     EntryType `json:"type"`
+	Name     string    `json:"name,omitempty"`
+	NameRaw  []byte    `json:"name_raw,omitempty"`
+	Size     int64     `json:"size,omitempty"`
+	Payload  []byte    `json:"payload"` // SegmentType stores payload here; FileType stores crc64 checksum here;
+	Position int       `json:"position"`
 }
 
 // SetName will check name for valid UTF-8 string, and set the appropriate
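
For orientation, a minimal sketch of how entries would be built with the renamed identifiers above. The `storage` package name and import path are assumptions (this commit is on a fork of tar-split), and the crc64 polynomial shown is only illustrative of the `hash/crc64` usage mentioned in the Entry doc comment; it is not part of this commit.

package main

import (
	"fmt"
	"hash/crc64"

	"github.com/vbatts/tar-split/tar/storage" // assumed import path
)

func main() {
	// SegmentEntry carries raw archive bytes (headers, padding) in Payload.
	seg := storage.Entry{
		Type:     storage.SegmentEntry,
		Payload:  []byte("raw tar header bytes..."),
		Position: 0,
	}

	// FileCheckEntry carries only a crc64 checksum of the file body in
	// Payload; the body itself is re-read from disk when reassembling.
	body := []byte("hello world\n")
	h := crc64.New(crc64.MakeTable(crc64.ISO)) // polynomial choice is illustrative
	h.Write(body)
	file := storage.Entry{
		Type:     storage.FileCheckEntry,
		Name:     "./hello.txt",
		Size:     int64(len(body)),
		Payload:  h.Sum(nil),
		Position: 1,
	}

	fmt.Println(seg.Position, file.Name, len(file.Payload)) // 0 ./hello.txt 8
}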
@@ -9,23 +9,23 @@ import (
 func TestEntries(t *testing.T) {
 	e := Entries{
 		Entry{
-			Type:     SegmentType,
+			Type:     SegmentEntry,
 			Payload:  []byte("y'all"),
 			Position: 1,
 		},
 		Entry{
-			Type:     SegmentType,
+			Type:     SegmentEntry,
 			Payload:  []byte("doin"),
 			Position: 3,
 		},
 		Entry{
-			Type:     FileType,
+			Type:     FileCheckEntry,
 			Name:     "./hurr.txt",
 			Payload:  []byte("deadbeef"),
 			Position: 2,
 		},
 		Entry{
-			Type:     SegmentType,
+			Type:     SegmentEntry,
 			Payload:  []byte("how"),
 			Position: 0,
 		},
@@ -38,7 +38,7 @@ func TestEntries(t *testing.T) {
 
 func TestFile(t *testing.T) {
 	f := Entry{
-		Type:     FileType,
+		Type:     FileCheckEntry,
 		Size:     100,
 		Position: 2,
 	}
 
@@ -67,7 +67,7 @@ func TestFile(t *testing.T) {
 
 func TestFileRaw(t *testing.T) {
 	f := Entry{
-		Type:     FileType,
+		Type:     FileCheckEntry,
 		Size:     100,
 		Position: 2,
 	}
 
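
Since Entries implements sort.Interface keyed on Position (the Len/Swap/Less methods in the first hunk), the shuffled test data above can be put back into stream order with sort.Sort. A small sketch, again assuming the storage package name:

package main

import (
	"fmt"
	"sort"

	"github.com/vbatts/tar-split/tar/storage" // assumed import path
)

func main() {
	e := storage.Entries{
		{Type: storage.SegmentEntry, Payload: []byte("y'all"), Position: 1},
		{Type: storage.SegmentEntry, Payload: []byte("doin"), Position: 3},
		{Type: storage.FileCheckEntry, Name: "./hurr.txt", Payload: []byte("deadbeef"), Position: 2},
		{Type: storage.SegmentEntry, Payload: []byte("how"), Position: 0},
	}
	sort.Sort(e) // orders by Position: "how", "y'all", "./hurr.txt", "doin"
	for _, ent := range e {
		fmt.Println(ent.Position, string(ent.Payload))
	}
}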
@@ -44,7 +44,7 @@ func (jup *jsonUnpacker) Next() (*Entry, error) {
 	}
 
 	// check for dup name
-	if e.Type == FileType {
+	if e.Type == FileCheckEntry {
 		cName := filepath.Clean(e.GetName())
 		if _, ok := jup.seen[cName]; ok {
 			return nil, ErrDuplicatePath
@@ -55,8 +55,8 @@ func (jup *jsonUnpacker) Next() (*Entry, error) {
 	return &e, err
 }
 
-// NewJSONUnpacker provides an Unpacker that reads Entries (SegmentType and
-// FileType) as a json document.
+// NewJSONUnpacker provides an Unpacker that reads Entries (SegmentEntry and
+// FileCheckEntry) as a json document.
 //
 // Each Entry read are expected to be delimited by new line.
 func NewJSONUnpacker(r io.Reader) Unpacker {
@@ -85,7 +85,7 @@ func (jp *jsonPacker) AddEntry(e Entry) (int, error) {
 	}
 
 	// check early for dup name
-	if e.Type == FileType {
+	if e.Type == FileCheckEntry {
 		cName := filepath.Clean(e.GetName())
 		if _, ok := jp.seen[cName]; ok {
 			return -1, ErrDuplicatePath
@@ -104,8 +104,8 @@ func (jp *jsonPacker) AddEntry(e Entry) (int, error) {
 	return e.Position, nil
 }
 
-// NewJSONPacker provides a Packer that writes each Entry (SegmentType and
-// FileType) as a json document.
+// NewJSONPacker provides a Packer that writes each Entry (SegmentEntry and
+// FileCheckEntry) as a json document.
 //
 // The Entries are delimited by new line.
 func NewJSONPacker(w io.Writer) Packer {
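
To make the rename concrete, a hedged sketch of the Packer/Unpacker round trip these hunks touch. The storage package name and import path are again assumptions; only NewJSONPacker, NewJSONUnpacker, AddEntry, Next, and GetName from the diff above are relied on.

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/vbatts/tar-split/tar/storage" // assumed import path
)

func main() {
	buf := new(bytes.Buffer)

	// Pack: each AddEntry writes one newline-delimited JSON document and
	// returns the entry's Position. A second FileCheckEntry with the same
	// cleaned name would be rejected with ErrDuplicatePath.
	jp := storage.NewJSONPacker(buf)
	for _, e := range []storage.Entry{
		{Type: storage.SegmentEntry, Payload: []byte("raw header bytes")},
		{Type: storage.FileCheckEntry, Name: "./hurr.txt", Payload: []byte("deadbeef")},
	} {
		if _, err := jp.AddEntry(e); err != nil {
			log.Fatal(err)
		}
	}

	// Unpack: Next yields entries until the stream is exhausted.
	jup := storage.NewJSONUnpacker(buf)
	for {
		e, err := jup.Next()
		if err != nil {
			break
		}
		fmt.Println(e.Position, e.Type, e.GetName())
	}
}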
@@ -12,17 +12,17 @@ import (
 func TestDuplicateFail(t *testing.T) {
 	e := []Entry{
 		Entry{
-			Type:    FileType,
+			Type:    FileCheckEntry,
 			Name:    "./hurr.txt",
 			Payload: []byte("abcde"),
 		},
 		Entry{
-			Type:    FileType,
+			Type:    FileCheckEntry,
 			Name:    "./hurr.txt",
 			Payload: []byte("deadbeef"),
 		},
 		Entry{
-			Type:    FileType,
+			Type:    FileCheckEntry,
 			Name:    "hurr.txt", // slightly different path, same file though
 			Payload: []byte("deadbeef"),
 		},
@@ -45,20 +45,20 @@ func TestDuplicateFail(t *testing.T) {
 func TestJSONPackerUnpacker(t *testing.T) {
 	e := []Entry{
 		Entry{
-			Type:    SegmentType,
+			Type:    SegmentEntry,
 			Payload: []byte("how"),
 		},
 		Entry{
-			Type:    SegmentType,
+			Type:    SegmentEntry,
 			Payload: []byte("y'all"),
 		},
 		Entry{
-			Type:    FileType,
+			Type:    FileCheckEntry,
 			Name:    "./hurr.txt",
 			Payload: []byte("deadbeef"),
 		},
 		Entry{
-			Type:    SegmentType,
+			Type:    SegmentEntry,
 			Payload: []byte("doin"),
 		},
 	}
 
@@ -106,20 +106,20 @@ func TestJSONPackerUnpacker(t *testing.T) {
 func TestGzip(t *testing.T) {
 	e := []Entry{
 		Entry{
-			Type:    SegmentType,
+			Type:    SegmentEntry,
 			Payload: []byte("how"),
 		},
 		Entry{
-			Type:    SegmentType,
+			Type:    SegmentEntry,
 			Payload: []byte("y'all"),
 		},
 		Entry{
-			Type:    FileType,
+			Type:    FileCheckEntry,
 			Name:    "./hurr.txt",
 			Payload: []byte("deadbeef"),
 		},
 		Entry{
-			Type:    SegmentType,
+			Type:    SegmentEntry,
 			Payload: []byte("doin"),
 		},
 	}
 
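
A side note on why TestDuplicateFail expects the third entry to collide: the packer and unpacker dedup on the filepath.Clean'd name, so "./hurr.txt" and "hurr.txt" normalize to the same key. A quick standalone check:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Both spellings reduce to the same cleaned path, so the second one
	// hits the seen map and triggers ErrDuplicatePath.
	fmt.Println(filepath.Clean("./hurr.txt")) // hurr.txt
	fmt.Println(filepath.Clean("hurr.txt"))   // hurr.txt
}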