*: refactoring to support streams
When creating a manifest from, or validating against, a stream such as a tar archive, some of the functions have to be approached differently than when walking a directory tree. This is the beginning of allowing for such features.

Signed-off-by: Vincent Batts <vbatts@hashbangbash.com>
parent 119cdc314c
commit faa80931af
11 changed files with 534 additions and 133 deletions
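For orientation, here is a minimal sketch of the two ways a DirectoryHierarchy is meant to be produced once this lands: Walk for an on-disk directory tree, and the new NewTarStreamer for a tar stream. This is illustrative only and not part of the diff; it assumes the import path github.com/vbatts/go-mtree and the API exactly as introduced below.

	package main

	import (
		"io"
		"io/ioutil"
		"os"

		mtree "github.com/vbatts/go-mtree" // assumed import path
	)

	func main() {
		// 1) From a directory tree (the pre-existing path).
		dh, err := mtree.Walk("./some/dir", nil, mtree.DefaultKeywords)
		if err != nil {
			panic(err)
		}
		dh.WriteTo(os.Stdout)

		// 2) From a tar stream (the new path): wrap the stream, drive it to EOF,
		// then ask the Streamer for the hierarchy it observed along the way.
		fh, err := os.Open("archive.tar")
		if err != nil {
			panic(err)
		}
		defer fh.Close()
		str := mtree.NewTarStreamer(fh, mtree.DefaultKeywords)
		if _, err := io.Copy(ioutil.Discard, str); err != nil && err != io.EOF {
			panic(err)
		}
		if err := str.Close(); err != nil {
			panic(err)
		}
		dh2, err := str.Hierarchy()
		if err != nil {
			panic(err)
		}
		dh2.WriteTo(os.Stdout)
	}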
check.go (12 changed lines)

@@ -3,7 +3,6 @@ package mtree
 import (
 	"fmt"
 	"os"
-	"path/filepath"
 	"sort"
 )

@@ -51,7 +50,8 @@ func Check(root string, dh *DirectoryHierarchy, keywords []string) (*Result, error) {
 			creator.curSet = nil
 		}
 	case RelativeType, FullType:
-		info, err := os.Lstat(e.Path())
+		filename := e.Path()
+		info, err := os.Lstat(filename)
 		if err != nil {
 			return nil, err
 		}
@@ -71,10 +71,16 @@ func Check(root string, dh *DirectoryHierarchy, keywords []string) (*Result, error) {
 			if keywords != nil && !inSlice(kv.Keyword(), keywords) {
 				continue
 			}
-			curKeyVal, err := keywordFunc(filepath.Join(root, e.Path()), info)
+			fh, err := os.Open(filename)
 			if err != nil {
 				return nil, err
 			}
+			curKeyVal, err := keywordFunc(filename, info, fh)
+			if err != nil {
+				fh.Close()
+				return nil, err
+			}
+			fh.Close()
 			if string(kv) != curKeyVal {
 				failure := Failure{Path: e.Path(), Keyword: kv.Keyword(), Expected: kv.Value(), Got: KeyVal(curKeyVal).Value()}
 				result.Failures = append(result.Failures, failure)
creator.go (new file, 9 lines)

package mtree

// dhCreator is used when building a DirectoryHierarchy
type dhCreator struct {
	DH     *DirectoryHierarchy
	curSet *Entry
	curDir *Entry
	curEnt *Entry
}
entry.go (new file, 80 lines)

package mtree

import (
	"fmt"
	"path/filepath"
	"strings"
)

type byPos []Entry

func (bp byPos) Len() int           { return len(bp) }
func (bp byPos) Less(i, j int) bool { return bp[i].Pos < bp[j].Pos }
func (bp byPos) Swap(i, j int)      { bp[i], bp[j] = bp[j], bp[i] }

// Entry is each component of content in the mtree spec file
type Entry struct {
	Parent     *Entry   // up
	Children   []*Entry // down
	Prev, Next *Entry   // left, right
	Set        *Entry   // current `/set` for additional keywords
	Pos        int      // order in the spec
	Raw        string   // file or directory name
	Name       string   // file or directory name
	Keywords   []string // TODO(vbatts) maybe a keyword typed set of values?
	Type       EntryType
}

// Path provides the full path of the file, regardless of whether the entry is
// RelativeType or FullType
func (e Entry) Path() string {
	if e.Parent == nil || e.Type == FullType {
		return filepath.Clean(e.Name)
	}
	return filepath.Clean(filepath.Join(e.Parent.Path(), e.Name))
}

func (e Entry) String() string {
	if e.Raw != "" {
		return e.Raw
	}
	if e.Type == BlankType {
		return ""
	}
	if e.Type == DotDotType {
		return e.Name
	}
	// TODO(vbatts) if type is RelativeType and a keyword of not type=dir
	if e.Type == SpecialType || e.Type == FullType || inSlice("type=dir", e.Keywords) {
		return fmt.Sprintf("%s %s", e.Name, strings.Join(e.Keywords, " "))
	}
	return fmt.Sprintf(" %s %s", e.Name, strings.Join(e.Keywords, " "))
}

// EntryType are the formats of lines in an mtree spec file
type EntryType int

// The types of lines to be found in an mtree spec file
const (
	SignatureType EntryType = iota // first line of the file, like `#mtree v2.0`
	BlankType                      // blank lines are ignored
	CommentType                    // lines beginning with `#` are ignored
	SpecialType                    // lines with a `/` prefix issue a "special" command (currently only /set and /unset)
	RelativeType                   // if the first white-space delimited word does not have a '/' in it. Options/keywords are applied.
	DotDotType                     // .. - a relative path step. keywords/options are ignored
	FullType                       // if the first word on the line has a `/` after the first character, it is interpreted as a file pathname with options
)

// String returns the name of the EntryType
func (et EntryType) String() string {
	return typeNames[et]
}

var typeNames = map[EntryType]string{
	SignatureType: "SignatureType",
	BlankType:     "BlankType",
	CommentType:   "CommentType",
	SpecialType:   "SpecialType",
	RelativeType:  "RelativeType",
	DotDotType:    "DotDotType",
	FullType:      "FullType",
}
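Path() walks up the Parent chain, so a relative entry's full path can be recovered without re-reading the spec. A small illustrative snippet, not part of this diff, assuming the import path github.com/vbatts/go-mtree:

	package main

	import (
		"fmt"

		mtree "github.com/vbatts/go-mtree" // assumed import path
	)

	func main() {
		root := mtree.Entry{Name: "."}
		etc := mtree.Entry{Name: "etc", Parent: &root, Type: mtree.RelativeType}
		hosts := mtree.Entry{Name: "hosts", Parent: &etc, Type: mtree.RelativeType}

		// Each call recurses through the parents and cleans the result,
		// so the leading "./" contributed by the root entry is dropped.
		fmt.Println(hosts.Path()) // etc/hosts
	}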
hierarchy.go (75 changed lines)

@@ -1,11 +1,8 @@
 package mtree

 import (
-	"fmt"
 	"io"
-	"path/filepath"
 	"sort"
-	"strings"
 )

 // DirectoryHierarchy is the mapped structure for an mtree directory hierarchy
@@ -27,75 +24,3 @@ func (dh DirectoryHierarchy) WriteTo(w io.Writer) (n int64, err error) {
 	}
 	return sum, nil
 }
-
-type byPos []Entry
-
-func (bp byPos) Len() int           { return len(bp) }
-func (bp byPos) Less(i, j int) bool { return bp[i].Pos < bp[j].Pos }
-func (bp byPos) Swap(i, j int)      { bp[i], bp[j] = bp[j], bp[i] }
-
-// Entry is each component of content in the mtree spec file
-type Entry struct {
-	Parent, Child *Entry   // up, down
-	Prev, Next    *Entry   // left, right
-	Set           *Entry   // current `/set` for additional keywords
-	Pos           int      // order in the spec
-	Raw           string   // file or directory name
-	Name          string   // file or directory name
-	Keywords      []string // TODO(vbatts) maybe a keyword typed set of values?
-	Type          EntryType
-}
-
-// Path provides the full path of the file, despite RelativeType or FullType
-func (e Entry) Path() string {
-	if e.Parent == nil || e.Type == FullType {
-		return filepath.Clean(e.Name)
-	}
-	return filepath.Clean(filepath.Join(e.Parent.Path(), e.Name))
-}
-
-func (e Entry) String() string {
-	if e.Raw != "" {
-		return e.Raw
-	}
-	if e.Type == BlankType {
-		return ""
-	}
-	if e.Type == DotDotType {
-		return e.Name
-	}
-	// TODO(vbatts) if type is RelativeType and a keyword of not type=dir
-	if e.Type == SpecialType || e.Type == FullType || inSlice("type=dir", e.Keywords) {
-		return fmt.Sprintf("%s %s", e.Name, strings.Join(e.Keywords, " "))
-	}
-	return fmt.Sprintf(" %s %s", e.Name, strings.Join(e.Keywords, " "))
-}
-
-// EntryType are the formats of lines in an mtree spec file
-type EntryType int
-
-// The types of lines to be found in an mtree spec file
-const (
-	SignatureType EntryType = iota // first line of the file, like `#mtree v2.0`
-	BlankType                      // blank lines are ignored
-	CommentType                    // Lines beginning with `#` are ignored
-	SpecialType                    // line that has `/` prefix issue a "special" command (currently only /set and /unset)
-	RelativeType                   // if the first white-space delimited word does not have a '/' in it. Options/keywords are applied.
-	DotDotType                     // .. - A relative path step. keywords/options are ignored
-	FullType                       // if the first word on the line has a `/` after the first character, it interpretted as a file pathname with options
-)
-
-// String returns the name of the EntryType
-func (et EntryType) String() string {
-	return typeNames[et]
-}
-
-var typeNames = map[EntryType]string{
-	SignatureType: "SignatureType",
-	BlankType:     "BlankType",
-	CommentType:   "CommentType",
-	SpecialType:   "SpecialType",
-	RelativeType:  "RelativeType",
-	DotDotType:    "DotDotType",
-	FullType:      "FullType",
-}
keywords.go (41 changed lines)

@@ -1,6 +1,7 @@
 package mtree

 import (
+	"archive/tar"
 	"crypto/md5"
 	"crypto/sha1"
 	"crypto/sha256"
@@ -17,7 +18,10 @@ import (
 // KeywordFunc is the type of a function called on each file to be included in
 // a DirectoryHierarchy, that will produce the string output of the keyword to
 // be included for the file entry. Otherwise, empty string.
-type KeywordFunc func(path string, info os.FileInfo) (string, error)
+// io.Reader `r` is the file stream for the file payload. While this
+// function takes an io.Reader, the caller needs to reset it to the beginning
+// for each new KeywordFunc.
+type KeywordFunc func(path string, info os.FileInfo, r io.Reader) (string, error)

 // KeyVal is a "keyword=value"
 type KeyVal string
@@ -165,7 +169,7 @@ var (
 )

 var (
-	modeKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	modeKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		permissions := info.Mode().Perm()
 		if os.ModeSetuid&info.Mode() > 0 {
 			permissions |= (1 << 11)
@@ -178,52 +182,43 @@ var (
 		}
 		return fmt.Sprintf("mode=%#o", permissions), nil
 	}
-	sizeKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	sizeKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		return fmt.Sprintf("size=%d", info.Size()), nil
 	}
-	cksumKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	cksumKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		if !info.Mode().IsRegular() {
 			return "", nil
 		}
-		fh, err := os.Open(path)
-		if err != nil {
-			return "", err
-		}
-		defer fh.Close()
-		sum, _, err := cksum(fh)
+		sum, _, err := cksum(r)
 		if err != nil {
 			return "", err
 		}
 		return fmt.Sprintf("cksum=%d", sum), nil
 	}
 	hasherKeywordFunc = func(name string, newHash func() hash.Hash) KeywordFunc {
-		return func(path string, info os.FileInfo) (string, error) {
+		return func(path string, info os.FileInfo, r io.Reader) (string, error) {
 			if !info.Mode().IsRegular() {
 				return "", nil
 			}
-			fh, err := os.Open(path)
-			if err != nil {
-				return "", err
-			}
-			defer fh.Close()
-
 			h := newHash()
-			if _, err := io.Copy(h, fh); err != nil {
+			if _, err := io.Copy(h, r); err != nil {
 				return "", err
 			}
 			return fmt.Sprintf("%s=%x", name, h.Sum(nil)), nil
 		}
 	}
-	timeKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	timeKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		t := info.ModTime().UnixNano()
 		if t == 0 {
 			return "time=0.000000000", nil
 		}
 		return fmt.Sprintf("time=%d.%9.9d", (t / 1e9), (t % (t / 1e9))), nil
 	}
-	linkKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	linkKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if sys, ok := info.Sys().(*tar.Header); ok {
+			return sys.Linkname, nil
+		}
+
 		if info.Mode()&os.ModeSymlink != 0 {
 			str, err := os.Readlink(path)
 			if err != nil {
@@ -233,7 +228,7 @@ var (
 		}
 		return "", nil
 	}
-	typeKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	typeKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		if info.Mode().IsDir() {
 			return "type=dir", nil
 		}
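The practical effect of the new signature is that keyword helpers no longer open files themselves: the caller supplies a reader positioned at the start of the payload (an os.File when walking, a temp copy of the tar entry when streaming) and is responsible for rewinding it between keyword functions. A hedged sketch of a custom helper under this contract, if dropped into this package; the crc32 keyword is illustrative only and not part of this diff:

	package mtree

	import (
		"fmt"
		"hash/crc32"
		"io"
		"os"
	)

	// crc32KeywordFunc is an illustrative helper matching the new KeywordFunc
	// signature: it only consumes r and never opens path on its own.
	var crc32KeywordFunc KeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
		if !info.Mode().IsRegular() || r == nil {
			return "", nil
		}
		h := crc32.NewIEEE()
		if _, err := io.Copy(h, r); err != nil {
			return "", err
		}
		return fmt.Sprintf("crc32=%d", h.Sum32()), nil
	}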
keywords_linux.go

@@ -3,8 +3,10 @@
 package mtree

 import (
+	"archive/tar"
 	"crypto/sha1"
 	"fmt"
+	"io"
 	"os"
 	"os/user"
 	"strings"
@@ -14,7 +16,11 @@ import (
 )

 var (
-	unameKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	unameKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			return fmt.Sprintf("uname=%s", hdr.Uname), nil
+		}
+
 		stat := info.Sys().(*syscall.Stat_t)
 		u, err := user.LookupId(fmt.Sprintf("%d", stat.Uid))
 		if err != nil {
@@ -22,19 +28,40 @@ var (
 		}
 		return fmt.Sprintf("uname=%s", u.Username), nil
 	}
-	uidKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	uidKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			return fmt.Sprintf("uid=%d", hdr.Uid), nil
+		}
 		stat := info.Sys().(*syscall.Stat_t)
 		return fmt.Sprintf("uid=%d", stat.Uid), nil
 	}
-	gidKeywordFunc = func(path string, info os.FileInfo) (string, error) {
-		stat := info.Sys().(*syscall.Stat_t)
+	gidKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			return fmt.Sprintf("gid=%d", hdr.Gid), nil
+		}
+		if stat, ok := info.Sys().(*syscall.Stat_t); ok {
 			return fmt.Sprintf("gid=%d", stat.Gid), nil
 		}
-	nlinkKeywordFunc = func(path string, info os.FileInfo) (string, error) {
-		stat := info.Sys().(*syscall.Stat_t)
+		return "", nil
+	}
+	nlinkKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if stat, ok := info.Sys().(*syscall.Stat_t); ok {
 			return fmt.Sprintf("nlink=%d", stat.Nlink), nil
 		}
-	xattrKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+		return "", nil
+	}
+	xattrKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			if len(hdr.Xattrs) == 0 {
+				return "", nil
+			}
+			klist := []string{}
+			for k, v := range hdr.Xattrs {
+				klist = append(klist, fmt.Sprintf("xattr.%s=%x", k, sha1.Sum([]byte(v))))
+			}
+			return strings.Join(klist, " "), nil
+		}
+
 		xlist, err := xattr.List(path)
 		if err != nil {
 			return "", err
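The type assertions above work because archive/tar's Header.FileInfo() returns an os.FileInfo whose Sys() hands back the *tar.Header itself, while an os.Lstat result on Linux carries a *syscall.Stat_t. A small standalone sketch of that distinction (illustrative only, not part of this diff):

	package main

	import (
		"archive/tar"
		"fmt"
		"os"
	)

	func main() {
		hdr := &tar.Header{Name: "etc/hosts", Uid: 0, Gid: 0, Mode: 0644}
		fi := hdr.FileInfo()
		if h, ok := fi.Sys().(*tar.Header); ok {
			fmt.Printf("from tar header: uid=%d gid=%d\n", h.Uid, h.Gid)
		}

		if fi2, err := os.Lstat("/etc/hosts"); err == nil {
			// On Linux this FileInfo carries a *syscall.Stat_t, so the
			// tar.Header assertion fails and the keyword funcs fall
			// through to the stat-based path.
			_, isTar := fi2.Sys().(*tar.Header)
			fmt.Println("lstat result is a tar header:", isTar) // false
		}
	}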
keywords_unsupported.go

@@ -2,22 +2,36 @@

 package mtree

-import "os"
+import (
+	"archive/tar"
+	"fmt"
+	"io"
+	"os"
+)

 var (
-	unameKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	unameKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			return fmt.Sprintf("uname=%s", hdr.Uname), nil
+		}
 		return "", nil
 	}
-	uidKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	uidKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			return fmt.Sprintf("uid=%d", hdr.Uid), nil
+		}
 		return "", nil
 	}
-	gidKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	gidKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
+		if hdr, ok := info.Sys().(*tar.Header); ok {
+			return fmt.Sprintf("gid=%d", hdr.Gid), nil
+		}
 		return "", nil
 	}
-	nlinkKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	nlinkKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		return "", nil
 	}
-	xattrKeywordFunc = func(path string, info os.FileInfo) (string, error) {
+	xattrKeywordFunc = func(path string, info os.FileInfo, r io.Reader) (string, error) {
 		return "", nil
 	}
 )
tar.go (new file, 185 lines)

package mtree

import (
	"archive/tar"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"strings"
)

// Streamer is an interface that wraps an io.ReadCloser with a function that
// will return its Hierarchy
type Streamer interface {
	io.ReadCloser
	Hierarchy() (*DirectoryHierarchy, error)
}

// NewTarStreamer streams a tar archive and creates a file hierarchy based off
// of the tar metadata headers
func NewTarStreamer(r io.Reader, keywords []string) Streamer {
	pR, pW := io.Pipe()
	ts := &tarStream{
		pipeReader: pR,
		pipeWriter: pW,
		creator:    dhCreator{DH: &DirectoryHierarchy{}},
		teeReader:  io.TeeReader(r, pW),
		tarReader:  tar.NewReader(pR),
		keywords:   keywords,
	}
	go ts.readHeaders() // I don't like this
	return ts
}

type tarStream struct {
	creator    dhCreator
	pipeReader *io.PipeReader
	pipeWriter *io.PipeWriter
	teeReader  io.Reader
	tarReader  *tar.Reader
	keywords   []string
	err        error
}

func (ts *tarStream) readHeaders() {
	// We have to start with the directory we're in, and anything beyond these
	// items is determined at the time a tar is extracted.
	e := Entry{
		Name:     ".",
		Keywords: []string{"size=0", "type=dir"},
	}
	ts.creator.curDir = &e
	ts.creator.DH.Entries = append(ts.creator.DH.Entries, e)
	for {
		hdr, err := ts.tarReader.Next()
		if err != nil {
			ts.pipeReader.CloseWithError(err)
			return
		}

		// Because the content of the file may need to be read by several
		// KeywordFuncs, it needs to be an io.Seeker as well. So just reading
		// from ts.tarReader is not enough.
		tmpFile, err := ioutil.TempFile("", "ts.payload.")
		if err != nil {
			ts.pipeReader.CloseWithError(err)
			return
		}
		// for good measure
		if err := tmpFile.Chmod(0600); err != nil {
			tmpFile.Close()
			os.Remove(tmpFile.Name())
			ts.pipeReader.CloseWithError(err)
			return
		}
		if _, err := io.Copy(tmpFile, ts.tarReader); err != nil {
			tmpFile.Close()
			os.Remove(tmpFile.Name())
			ts.pipeReader.CloseWithError(err)
			return
		}

		// Alright, it's either file or directory
		e := Entry{
			Name: filepath.Base(hdr.Name),
			Pos:  len(ts.creator.DH.Entries),
			Type: RelativeType,
		}
		// now collect keywords on the file
		for _, keyword := range ts.keywords {
			if keyFunc, ok := KeywordFuncs[keyword]; ok {
				val, err := keyFunc(hdr.Name, hdr.FileInfo(), tmpFile)
				if err != nil {
					ts.setErr(err)
				}
				e.Keywords = append(e.Keywords, val)

				// don't forget to reset the reader
				if _, err := tmpFile.Seek(0, 0); err != nil {
					tmpFile.Close()
					os.Remove(tmpFile.Name())
					ts.pipeReader.CloseWithError(err)
					return
				}
			}
		}
		tmpFile.Close()
		os.Remove(tmpFile.Name())

		// compare directories, to determine parent of the current entry
		cd := compareDir(filepath.Dir(hdr.Name), ts.creator.curDir.Path())
		switch {
		case cd == sameDir:
			e.Parent = ts.creator.curDir
			if e.Parent != nil {
				e.Parent.Children = append(e.Parent.Children, &e)
			}
		case cd == parentDir:
			e.Parent = ts.creator.curDir.Parent
			if e.Parent != nil {
				e.Parent.Children = append(e.Parent.Children, &e)
			}
		}

		if hdr.FileInfo().IsDir() {
			ts.creator.curDir = &e
		}
		// TODO work out the parent/child relationship of these entries!
		if hdr.FileInfo().IsDir() {
			log.Println(strings.Split(hdr.Name, "/"), strings.Split(ts.creator.curDir.Path(), "/"))
		}

		ts.creator.DH.Entries = append(ts.creator.DH.Entries, e)

		// Now is the wacky part of building out the entries. Since we cannot
		// control how the archive was assembled, we can only take it in the
		// order given. Using `/set` will be tough. Hopefully I can do the
		// directory stepping with relative paths, but even then I may get a new
		// directory, and not the files first, but its directories first. :-\
	}
}

type relationship int

const (
	unknownDir relationship = iota
	sameDir
	childDir
	parentDir
)

func compareDir(curDir, prevDir string) relationship {
	curDir = filepath.Clean(curDir)
	prevDir = filepath.Clean(prevDir)
	if curDir == prevDir {
		return sameDir
	}
	if filepath.Dir(curDir) == prevDir {
		return childDir
	}
	if curDir == filepath.Dir(prevDir) {
		return parentDir
	}
	return unknownDir
}

func (ts *tarStream) setErr(err error) {
	ts.err = err
}

func (ts *tarStream) Read(p []byte) (n int, err error) {
	return ts.teeReader.Read(p)
}

func (ts *tarStream) Close() error {
	return ts.pipeReader.Close()
}

func (ts *tarStream) Hierarchy() (*DirectoryHierarchy, error) {
	if ts.err != nil && ts.err != io.EOF {
		return nil, ts.err
	}
	return ts.creator.DH, nil
}
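The plumbing in NewTarStreamer is the standard io.TeeReader-into-io.Pipe arrangement: every byte the consumer reads from the Streamer is also written into the pipe, and the readHeaders goroutine parses that copy with tar.NewReader, so the caller can extract the archive and collect the hierarchy in a single pass. A standalone sketch of just that pattern, using only the standard library (illustrative, not part of this diff):

	package main

	import (
		"archive/tar"
		"bytes"
		"fmt"
		"io"
		"io/ioutil"
	)

	func main() {
		// A tiny in-memory archive to stand in for the real tar stream
		// (error handling elided for brevity in this sketch).
		var src bytes.Buffer
		tw := tar.NewWriter(&src)
		tw.WriteHeader(&tar.Header{Name: "hello.txt", Mode: 0644, Size: 6})
		tw.Write([]byte("howdy\n"))
		tw.Close()

		pr, pw := io.Pipe()
		tee := io.TeeReader(&src, pw) // whatever the consumer reads is also written to pw

		var names []string
		done := make(chan struct{})
		go func() {
			defer close(done)
			tr := tar.NewReader(pr)
			for {
				hdr, err := tr.Next()
				if err != nil {
					pr.CloseWithError(err) // io.EOF at the normal end of the archive
					return
				}
				names = append(names, hdr.Name)
			}
		}()

		// The "consumer" here just discards the bytes; real code would be
		// extracting the archive while the goroutine records the headers.
		if _, err := io.Copy(ioutil.Discard, tee); err != nil {
			panic(err)
		}
		pw.Close() // signal EOF to the parsing goroutine
		<-done

		fmt.Println("entries seen:", names) // [hello.txt]
	}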
tar_test.go (new file, 119 lines)

package mtree

import (
	"archive/tar"
	"bytes"
	"io"
	"io/ioutil"
	"os"
	"testing"
)

func ExampleStreamer() {
	fh, err := os.Open("./testdata/test.tar")
	if err != nil {
		// handle error ...
	}
	str := NewTarStreamer(fh, nil)
	if err := extractTar("/tmp/dir", str); err != nil {
		// handle error ...
	}

	dh, err := str.Hierarchy()
	if err != nil {
		// handle error ...
	}

	res, err := Check("/tmp/dir/", dh, nil)
	if err != nil {
		// handle error ...
	}
	if len(res.Failures) > 0 {
		// handle validation issue ...
	}
}

func extractTar(root string, tr io.Reader) error {
	return nil
}

func TestTar(t *testing.T) {
	/*
		data, err := makeTarStream()
		if err != nil {
			t.Fatal(err)
		}
		buf := bytes.NewBuffer(data)
		str := NewTarStreamer(buf, append(DefaultKeywords, "sha1"))
	*/
	fh, err := os.Open("./testdata/test.tar")
	if err != nil {
		t.Fatal(err)
	}
	str := NewTarStreamer(fh, append(DefaultKeywords, "sha1"))

	if _, err := io.Copy(ioutil.Discard, str); err != nil && err != io.EOF {
		t.Fatal(err)
	}
	if err := str.Close(); err != nil {
		t.Fatal(err)
	}
	defer fh.Close()

	/*
		fi, err := fh.Stat()
		if err != nil {
			t.Fatal(err)
		}
		if i != fi.Size() {
			t.Errorf("expected length %d; got %d", fi.Size(), i)
		}
	*/
	dh, err := str.Hierarchy()
	if err != nil {
		t.Fatal(err)
	}
	if dh == nil {
		t.Fatal("expected a DirectoryHierarchy struct, but got nil")
	}
	//dh.WriteTo(os.Stdout)
}

// minimal tar archive stream that mimics what is in ./testdata/test.tar
func makeTarStream() ([]byte, error) {
	buf := new(bytes.Buffer)

	// Create a new tar archive.
	tw := tar.NewWriter(buf)

	// Add some files to the archive.
	var files = []struct {
		Name, Body string
		Mode       int64
		Type       byte
		Xattrs     map[string]string
	}{
		{"x/", "", 0755, '5', nil},
		{"x/files", "howdy\n", 0644, '0', nil},
	}
	for _, file := range files {
		hdr := &tar.Header{
			Name:   file.Name,
			Mode:   file.Mode,
			Size:   int64(len(file.Body)),
			Xattrs: file.Xattrs,
		}
		if err := tw.WriteHeader(hdr); err != nil {
			return nil, err
		}
		if len(file.Body) > 0 {
			if _, err := tw.Write([]byte(file.Body)); err != nil {
				return nil, err
			}
		}
	}
	// Make sure to check the error on Close.
	if err := tw.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
testdata/test.tar (new file, vendored; binary file not shown)
walk.go (69 changed lines)

@@ -2,6 +2,7 @@ package mtree

 import (
 	"fmt"
+	"io"
 	"os"
 	"os/user"
 	"path/filepath"
@@ -14,13 +15,6 @@ import (
 // returns true, then the path is not included in the spec.
 type ExcludeFunc func(path string, info os.FileInfo) bool

-type dhCreator struct {
-	DH     *DirectoryHierarchy
-	curSet *Entry
-	curDir *Entry
-	curEnt *Entry
-}
-
 var defaultSetKeywords = []string{"type=file", "nlink=1", "flags=none", "mode=0664"}

 // Walk from root directory and assemble the DirectoryHierarchy. excludes
@@ -76,11 +70,26 @@ func Walk(root string, exlcudes []ExcludeFunc, keywords []string) (*DirectoryHierarchy, error) {
 				Keywords: keywordSelector(defaultSetKeywords, keywords),
 			}
 			for _, keyword := range SetKeywords {
-				if str, err := KeywordFuncs[keyword](path, info); err == nil && str != "" {
+				err := func() error {
+					var r io.Reader
+					if info.Mode().IsRegular() {
+						fh, err := os.Open(path)
+						if err != nil {
+							return err
+						}
+						defer fh.Close()
+						r = fh
+					}
+					if str, err := KeywordFuncs[keyword](path, info, r); err == nil && str != "" {
 						e.Keywords = append(e.Keywords, str)
 					} else if err != nil {
 						return err
 					}
+					return nil
+				}()
+				if err != nil {
+					return err
+				}
 			}
 			creator.curSet = &e
 			creator.DH.Entries = append(creator.DH.Entries, e)
@@ -88,9 +97,26 @@ func Walk(root string, exlcudes []ExcludeFunc, keywords []string) (*DirectoryHierarchy, error) {
 			// check the attributes of the /set keywords and re-set if changed
 			klist := []string{}
 			for _, keyword := range SetKeywords {
-				if str, err := KeywordFuncs[keyword](path, info); err == nil && str != "" {
+				err := func() error {
+					var r io.Reader
+					if info.Mode().IsRegular() {
+						fh, err := os.Open(path)
+						if err != nil {
+							return err
+						}
+						defer fh.Close()
+						r = fh
+					}
+					str, err := KeywordFuncs[keyword](path, info, r)
+					if err != nil {
+						return err
+					}
+					if str != "" {
 						klist = append(klist, str)
-				} else if err != nil {
+					}
+					return nil
+				}()
+				if err != nil {
 					return err
 				}
 			}
@@ -122,11 +148,26 @@ func Walk(root string, exlcudes []ExcludeFunc, keywords []string) (*DirectoryHierarchy, error) {
 				Parent:   creator.curDir,
 			}
 			for _, keyword := range keywords {
-				if str, err := KeywordFuncs[keyword](path, info); err == nil && str != "" {
-					if !inSlice(str, creator.curSet.Keywords) {
+				err := func() error {
+					var r io.Reader
+					if info.Mode().IsRegular() {
+						fh, err := os.Open(path)
+						if err != nil {
+							return err
+						}
+						defer fh.Close()
+						r = fh
+					}
+					str, err := KeywordFuncs[keyword](path, info, r)
+					if err != nil {
+						return err
+					}
+					if str != "" && !inSlice(str, creator.curSet.Keywords) {
 						e.Keywords = append(e.Keywords, str)
 					}
-				} else if err != nil {
+					return nil
+				}()
+				if err != nil {
 					return err
 				}
 			}
@@ -245,7 +286,7 @@ func readOrderedDirNames(dirname string) ([]string, error) {
 	return append(names, dirnames...), nil
 }

-// signatureEntries is helper function that returns a slice of Entry's
+// signatureEntries is a simple helper function that returns a slice of Entry's
 // that describe the metadata signature about the host. Items like date, user,
 // machine, and tree (which is specified by argument `root`), are considered.
 // These Entry's construct comments in the mtree specification, so if there is
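The repeated `err := func() error { ... }()` blocks exist so that `defer fh.Close()` fires at the end of each keyword pass rather than piling up open file handles until Walk returns. A minimal standalone illustration of the pattern (generic, not tied to this code; the keyword names below are placeholders):

	package main

	import (
		"fmt"
		"io"
		"io/ioutil"
		"os"
	)

	func main() {
		path := "/etc/hosts"
		info, err := os.Lstat(path)
		if err != nil {
			fmt.Println(err)
			return
		}
		keywords := []string{"size", "sha1", "md5"} // stand-ins; only the looping matters here

		for _, keyword := range keywords {
			// The anonymous function gives each keyword pass its own scope, so the
			// deferred Close runs before the next iteration instead of at the end
			// of the whole walk.
			err := func() error {
				var r io.Reader
				if info.Mode().IsRegular() {
					fh, err := os.Open(path)
					if err != nil {
						return err
					}
					defer fh.Close()
					r = fh
				}
				// ... hand (path, info, r) to the keyword function here ...
				if r != nil {
					if _, err := io.Copy(ioutil.Discard, r); err != nil {
						return err
					}
				}
				fmt.Println("handled keyword", keyword)
				return nil
			}()
			if err != nil {
				fmt.Println(err)
				return
			}
		}
	}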