2014-07-30 13:42:12 +00:00
|
|
|
package tarsum
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"compress/gzip"
|
|
|
|
"crypto/sha256"
|
|
|
|
"encoding/hex"
|
|
|
|
"hash"
|
|
|
|
"io"
|
|
|
|
"sort"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
"github.com/docker/docker/vendor/src/code.google.com/p/go/src/pkg/archive/tar"
|
2014-07-24 20:37:44 +00:00
|
|
|
|
|
|
|
"github.com/docker/docker/pkg/log"
|
2014-07-30 13:42:12 +00:00
|
|
|
)
|
|
|
|
|
2014-09-06 12:29:19 +00:00
|
|
|
// Buffer-size buckets used by Read to size its reusable scratch buffer:
// the smallest bucket that covers the caller's buffer is chosen, so the
// scratch allocation happens once rather than per call.
const (
	buf8K  = 8 * 1024
	buf16K = 16 * 1024
	buf32K = 32 * 1024
)
|
|
|
|
|
2014-08-21 20:12:52 +00:00
|
|
|
// NewTarSum creates a new interface for calculating a fixed time checksum of a
|
|
|
|
// tar archive.
|
|
|
|
//
|
|
|
|
// This is used for calculating checksums of layers of an image, in some cases
|
|
|
|
// including the byte payload of the image's json metadata as well, and for
|
|
|
|
// calculating the checksums for buildcache.
|
2014-09-11 01:56:20 +00:00
|
|
|
func NewTarSum(r io.Reader, dc bool, v Version) (TarSum, error) {
|
2014-08-21 20:12:52 +00:00
|
|
|
if _, ok := tarSumVersions[v]; !ok {
|
|
|
|
return nil, ErrVersionNotImplemented
|
|
|
|
}
|
2014-09-11 01:56:20 +00:00
|
|
|
return &tarSum{Reader: r, DisableCompression: dc, tarSumVersion: v}, nil
|
2014-08-21 20:12:52 +00:00
|
|
|
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
// TarSum is the generic interface for calculating fixed time
// checksums of a tar archive
type TarSum interface {
	io.Reader
	// GetSums returns the per-file checksums gathered while reading,
	// keyed by the (cleaned) file name inside the archive.
	GetSums() map[string]string
	// Sum returns the final checksum string for the whole archive;
	// when non-nil, the extra bytes are hashed in before the file sums.
	Sum([]byte) string
	// Version reports which tarsum version this instance calculates.
	Version() Version
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
// tarSum struct is the structure for a Version0 checksum calculation
type tarSum struct {
	io.Reader                       // the raw tar stream being consumed
	tarR        *tar.Reader         // reads entries from the underlying stream
	tarW        *tar.Writer         // re-packs entries into bufTar
	gz          writeCloseFlusher   // gzip (or no-op) compressor feeding bufGz
	bufTar      *bytes.Buffer       // staging buffer between tarW and gz
	bufGz       *bytes.Buffer       // (possibly compressed) output served to Read callers
	bufData     []byte              // reusable scratch buffer, sized lazily in Read
	h           hash.Hash           // running hash of the current entry's header+payload
	sums        map[string]string   // per-file checksums keyed by cleaned entry name
	currentFile string              // name of the entry currently being hashed
	finished    bool                // set once the archive has been fully consumed
	first       bool                // true until the first header has been processed
	DisableCompression bool // false by default. When false, the output gzip compressed.
	tarSumVersion      Version // this field is not exported so it can not be mutated during use
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
func (ts tarSum) Version() Version {
|
2014-08-21 20:12:52 +00:00
|
|
|
return ts.tarSumVersion
|
2014-07-30 13:42:12 +00:00
|
|
|
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
func (ts tarSum) selectHeaders(h *tar.Header, v Version) (set [][2]string) {
|
2014-07-30 13:42:12 +00:00
|
|
|
for _, elem := range [][2]string{
|
|
|
|
{"name", h.Name},
|
|
|
|
{"mode", strconv.Itoa(int(h.Mode))},
|
|
|
|
{"uid", strconv.Itoa(h.Uid)},
|
|
|
|
{"gid", strconv.Itoa(h.Gid)},
|
|
|
|
{"size", strconv.Itoa(int(h.Size))},
|
|
|
|
{"mtime", strconv.Itoa(int(h.ModTime.UTC().Unix()))},
|
|
|
|
{"typeflag", string([]byte{h.Typeflag})},
|
|
|
|
{"linkname", h.Linkname},
|
|
|
|
{"uname", h.Uname},
|
|
|
|
{"gname", h.Gname},
|
|
|
|
{"devmajor", strconv.Itoa(int(h.Devmajor))},
|
|
|
|
{"devminor", strconv.Itoa(int(h.Devminor))},
|
|
|
|
} {
|
2014-08-21 20:12:52 +00:00
|
|
|
if v == VersionDev && elem[0] == "mtime" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
set = append(set, elem)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
func (ts *tarSum) encodeHeader(h *tar.Header) error {
|
2014-08-21 20:12:52 +00:00
|
|
|
for _, elem := range ts.selectHeaders(h, ts.Version()) {
|
2014-07-30 13:42:12 +00:00
|
|
|
if _, err := ts.h.Write([]byte(elem[0] + elem[1])); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
2014-05-21 21:13:53 +00:00
|
|
|
|
|
|
|
// include the additional pax headers, from an ordered list
|
|
|
|
var keys []string
|
|
|
|
for k := range h.Xattrs {
|
|
|
|
keys = append(keys, k)
|
|
|
|
}
|
|
|
|
sort.Strings(keys)
|
|
|
|
for _, k := range keys {
|
|
|
|
if _, err := ts.h.Write([]byte(k + h.Xattrs[k])); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
2014-07-30 13:42:12 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
// Read implements io.Reader. It consumes the underlying tar archive while
// simultaneously (a) hashing each entry's header and payload into per-file
// sums, and (b) re-packing the bytes through a tar writer and an optional
// gzip compressor, serving that (possibly compressed) copy to the caller.
func (ts *tarSum) Read(buf []byte) (int, error) {
	// Lazy one-time initialization on the first call; ts.gz doubles as
	// the "initialized" flag.
	if ts.gz == nil {
		ts.bufTar = bytes.NewBuffer([]byte{})
		ts.bufGz = bytes.NewBuffer([]byte{})
		ts.tarR = tar.NewReader(ts.Reader)
		ts.tarW = tar.NewWriter(ts.bufTar)
		if !ts.DisableCompression {
			ts.gz = gzip.NewWriter(ts.bufGz)
		} else {
			// no-op "compressor": bytes pass straight through to bufGz
			ts.gz = &nopCloseFlusher{Writer: ts.bufGz}
		}
		ts.h = sha256.New()
		ts.h.Reset()
		ts.first = true
		ts.sums = make(map[string]string)
	}

	// After the archive has been fully consumed, drain whatever output
	// remains buffered.
	if ts.finished {
		return ts.bufGz.Read(buf)
	}
	// Allocate the scratch buffer once, bucketed by the caller's buffer
	// size to avoid per-call allocation.
	if ts.bufData == nil {
		switch {
		case len(buf) <= buf8K:
			ts.bufData = make([]byte, buf8K)
		case len(buf) <= buf16K:
			ts.bufData = make([]byte, buf16K)
		case len(buf) <= buf32K:
			ts.bufData = make([]byte, buf32K)
		default:
			ts.bufData = make([]byte, len(buf))
		}
	}
	// NOTE(review): slices one byte fewer than len(buf), and would panic
	// for len(buf) == 0 — looks like an off-by-one; confirm intent.
	buf2 := ts.bufData[:len(buf)-1]

	n, err := ts.tarR.Read(buf2)
	if err != nil {
		if err == io.EOF {
			// End of the current entry: fold its trailing bytes into
			// the hash, then record the finished file's checksum.
			if _, err := ts.h.Write(buf2[:n]); err != nil {
				return 0, err
			}
			if !ts.first {
				ts.sums[ts.currentFile] = hex.EncodeToString(ts.h.Sum(nil))
				ts.h.Reset()
			} else {
				// First EOF fires before any file data; skip it.
				ts.first = false
			}

			currentHeader, err := ts.tarR.Next()
			if err != nil {
				if err == io.EOF {
					// End of the whole archive: finalize the
					// compressor so the remaining output can be
					// drained via the ts.finished path above.
					if err := ts.gz.Close(); err != nil {
						return 0, err
					}
					ts.finished = true
					// NOTE(review): returns n (tar-side read count)
					// without copying anything into buf here; the
					// buffered output is only served on subsequent
					// calls — confirm callers tolerate this.
					return n, nil
				}
				return n, err
			}
			// Normalize "./name" prefixes and trailing "/" so sums
			// keys are stable across archive producers.
			ts.currentFile = strings.TrimSuffix(strings.TrimPrefix(currentHeader.Name, "./"), "/")
			if err := ts.encodeHeader(currentHeader); err != nil {
				return 0, err
			}
			if err := ts.tarW.WriteHeader(currentHeader); err != nil {
				return 0, err
			}
			if _, err := ts.tarW.Write(buf2[:n]); err != nil {
				return 0, err
			}
			ts.tarW.Flush()
			if _, err := io.Copy(ts.gz, ts.bufTar); err != nil {
				return 0, err
			}
			ts.gz.Flush()

			return ts.bufGz.Read(buf)
		}
		return n, err
	}

	// Filling the hash buffer
	if _, err = ts.h.Write(buf2[:n]); err != nil {
		return 0, err
	}

	// Filling the tar writer
	if _, err = ts.tarW.Write(buf2[:n]); err != nil {
		return 0, err
	}
	ts.tarW.Flush()

	// Filling the gz writer
	if _, err = io.Copy(ts.gz, ts.bufTar); err != nil {
		return 0, err
	}
	ts.gz.Flush()

	return ts.bufGz.Read(buf)
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
func (ts *tarSum) Sum(extra []byte) string {
|
2014-07-30 13:42:12 +00:00
|
|
|
var sums []string
|
|
|
|
|
|
|
|
for _, sum := range ts.sums {
|
|
|
|
sums = append(sums, sum)
|
|
|
|
}
|
|
|
|
sort.Strings(sums)
|
|
|
|
h := sha256.New()
|
|
|
|
if extra != nil {
|
|
|
|
h.Write(extra)
|
|
|
|
}
|
|
|
|
for _, sum := range sums {
|
2014-09-09 17:31:11 +00:00
|
|
|
log.Debugf("-->%s<--", sum)
|
2014-07-30 13:42:12 +00:00
|
|
|
h.Write([]byte(sum))
|
|
|
|
}
|
2014-08-21 20:12:52 +00:00
|
|
|
checksum := ts.Version().String() + "+sha256:" + hex.EncodeToString(h.Sum(nil))
|
2014-09-09 17:31:11 +00:00
|
|
|
log.Debugf("checksum processed: %s", checksum)
|
2014-07-30 13:42:12 +00:00
|
|
|
return checksum
|
|
|
|
}
|
|
|
|
|
2014-09-11 01:56:20 +00:00
|
|
|
// GetSums returns the per-file checksums gathered so far, keyed by the
// cleaned entry name. The map is only fully populated once the stream has
// been read through to EOF; the caller receives the live map, not a copy.
func (ts *tarSum) GetSums() map[string]string {
	return ts.sums
}
|