package mtree

import (
	"fmt"
	"io"
	"os"
	"os/user"
	"path/filepath"
	"sort"
	"strings"
	"time"
)

// ExcludeFunc is the type of function called on each path walked to determine
// whether to be excluded from the assembled DirectoryHierarchy. If the func
// returns true, then the path is not included in the spec.
type ExcludeFunc func(path string, info os.FileInfo) bool

// ExcludeNonDirectories is an ExcludeFunc for excluding all paths that are not directories
var ExcludeNonDirectories = func(path string, info os.FileInfo) bool {
	return !info.IsDir()
}
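
// excludeSymlinks is a minimal illustrative sketch (not part of the original
// source): a custom ExcludeFunc that filters out symbolic links, in the same
// spirit as ExcludeNonDirectories above. The name is hypothetical.
var excludeSymlinks ExcludeFunc = func(path string, info os.FileInfo) bool {
	return info.Mode()&os.ModeSymlink != 0
}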

var defaultSetKeywords = []KeyVal{"type=file", "nlink=1", "flags=none", "mode=0664"}

// Walk from root directory and assemble the DirectoryHierarchy. excludes
// provided are used to skip paths. keywords are the set to collect from the
// walked paths. The recommended default list is DefaultKeywords.
func Walk(root string, excludes []ExcludeFunc, keywords []Keyword) (*DirectoryHierarchy, error) {
	creator := dhCreator{DH: &DirectoryHierarchy{}}
	// insert signature and metadata comments first (user, machine, tree, date)
	for _, e := range signatureEntries(root) {
		e.Pos = len(creator.DH.Entries)
		creator.DH.Entries = append(creator.DH.Entries, e)
	}
	// insert keyword metadata next
	for _, e := range keywordEntries(keywords) {
		e.Pos = len(creator.DH.Entries)
		creator.DH.Entries = append(creator.DH.Entries, e)
	}
	// walk the directory and add entries
	err := startWalk(&creator, root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		for _, ex := range excludes {
			if ex(path, info) {
				return nil
			}
		}

		entryPathName := filepath.Base(path)
		if info.IsDir() {
			creator.DH.Entries = append(creator.DH.Entries, Entry{
				Type: BlankType,
				Pos:  len(creator.DH.Entries),
			})

			// Insert a comment of the full path of the directory's name
			if creator.curDir != nil {
				dirname, err := creator.curDir.Path()
				if err != nil {
					return err
				}
				creator.DH.Entries = append(creator.DH.Entries, Entry{
					Pos:  len(creator.DH.Entries),
					Raw:  "# " + filepath.Join(dirname, entryPathName),
					Type: CommentType,
				})
			} else {
				entryPathName = "."
				creator.DH.Entries = append(creator.DH.Entries, Entry{
					Pos:  len(creator.DH.Entries),
					Raw:  "# .",
					Type: CommentType,
				})
			}

			// set the initial /set keywords
			if creator.curSet == nil {
				e := Entry{
					Name:     "/set",
					Type:     SpecialType,
					Pos:      len(creator.DH.Entries),
					Keywords: keyvalSelector(defaultSetKeywords, keywords),
				}
				for _, keyword := range SetKeywords {
					err := func() error {
						var r io.Reader
						if info.Mode().IsRegular() {
							fh, err := os.Open(path)
							if err != nil {
								return err
							}
							defer fh.Close()
							r = fh
						}
						keywordFunc, ok := KeywordFuncs[keyword]
						if !ok {
							return fmt.Errorf("Unknown keyword %q for file %q", keyword, path)
						}
						if str, err := keywordFunc(path, info, r); err == nil && str != "" {
							e.Keywords = append(e.Keywords, str)
						} else if err != nil {
							return err
						}
						return nil
					}()
					if err != nil {
						return err
					}
				}
				creator.curSet = &e
				creator.DH.Entries = append(creator.DH.Entries, e)
			} else if creator.curSet != nil {
				// check the attributes of the /set keywords and re-set if changed
				klist := []KeyVal{}
				for _, keyword := range SetKeywords {
					err := func() error {
						var r io.Reader
						if info.Mode().IsRegular() {
							fh, err := os.Open(path)
							if err != nil {
								return err
							}
							defer fh.Close()
							r = fh
						}
						keywordFunc, ok := KeywordFuncs[keyword]
						if !ok {
							return fmt.Errorf("Unknown keyword %q for file %q", keyword, path)
						}
						str, err := keywordFunc(path, info, r)
						if err != nil {
							return err
						}
						if str != "" {
							klist = append(klist, str)
						}
						return nil
					}()
					if err != nil {
						return err
					}
				}

				needNewSet := false
				for _, k := range klist {
					if !inKeyValSlice(k, creator.curSet.Keywords) {
						needNewSet = true
					}
				}
				if needNewSet {
					e := Entry{
						Name:     "/set",
						Type:     SpecialType,
						Pos:      len(creator.DH.Entries),
						Keywords: keyvalSelector(append(defaultSetKeywords, klist...), keywords),
					}
					creator.curSet = &e
					creator.DH.Entries = append(creator.DH.Entries, e)
				}
			}
		}
		encodedEntryName, err := Vis(entryPathName)
		if err != nil {
			return err
		}
		e := Entry{
			Name:   encodedEntryName,
			Pos:    len(creator.DH.Entries),
			Type:   RelativeType,
			Set:    creator.curSet,
			Parent: creator.curDir,
		}
		for _, keyword := range keywords {
			err := func() error {
				var r io.Reader
				if info.Mode().IsRegular() {
					fh, err := os.Open(path)
					if err != nil {
						return err
					}
					defer fh.Close()
					r = fh
				}
				keywordFunc, ok := KeywordFuncs[keyword]
				if !ok {
					return fmt.Errorf("Unknown keyword %q for file %q", keyword, path)
				}
				str, err := keywordFunc(path, info, r)
				if err != nil {
					return err
				}
				if str != "" && !inKeyValSlice(str, creator.curSet.Keywords) {
					e.Keywords = append(e.Keywords, str)
				}
				return nil
			}()
			if err != nil {
				return err
			}
		}
		if info.IsDir() {
			if creator.curDir != nil {
				creator.curDir.Next = &e
			}
			e.Prev = creator.curDir
			creator.curDir = &e
		} else {
			if creator.curEnt != nil {
				creator.curEnt.Next = &e
			}
			e.Prev = creator.curEnt
			creator.curEnt = &e
		}
		creator.DH.Entries = append(creator.DH.Entries, e)
		return nil
	})
	return creator.DH, err
}
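
// walkExample is a small usage sketch (not part of the original source): it
// assembles a DirectoryHierarchy for root with no exclusions, using the
// package's DefaultKeywords as recommended in the Walk documentation. The
// function name is hypothetical.
func walkExample(root string) (*DirectoryHierarchy, error) {
	return Walk(root, nil, DefaultKeywords)
}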

// startWalk walks the file tree rooted at root, calling walkFn for each file or
// directory in the tree, including root. All errors that arise visiting files
// and directories are filtered by walkFn. The files are walked in lexical
// order, which makes the output deterministic but means that for very
// large directories Walk can be inefficient.
// Walk does not follow symbolic links.
func startWalk(c *dhCreator, root string, walkFn filepath.WalkFunc) error {
	info, err := os.Lstat(root)
	if err != nil {
		return walkFn(root, nil, err)
	}
	return walk(c, root, info, walkFn)
}

// walk recursively descends path, calling walkFn for each file or directory.
func walk(c *dhCreator, path string, info os.FileInfo, walkFn filepath.WalkFunc) error {
	err := walkFn(path, info, nil)
	if err != nil {
		if info.IsDir() && err == filepath.SkipDir {
			return nil
		}
		return err
	}

	if !info.IsDir() {
		return nil
	}

	names, err := readOrderedDirNames(path)
	if err != nil {
		return walkFn(path, info, err)
	}

	for _, name := range names {
		filename := filepath.Join(path, name)
		fileInfo, err := os.Lstat(filename)
		if err != nil {
			if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir {
				return err
			}
		} else {
			err = walk(c, filename, fileInfo, walkFn)
			if err != nil {
				if !fileInfo.IsDir() || err != filepath.SkipDir {
					return err
				}
			}
		}
	}
	c.DH.Entries = append(c.DH.Entries, Entry{
		Name: "..",
		Type: DotDotType,
		Pos:  len(c.DH.Entries),
	})
	if c.curDir != nil {
		c.curDir = c.curDir.Parent
	}
	return nil
}

// readOrderedDirNames reads the directory and returns a sorted list of all
// entries with non-directories first, followed by directories.
func readOrderedDirNames(dirname string) ([]string, error) {
	f, err := os.Open(dirname)
	if err != nil {
		return nil, err
	}
	infos, err := f.Readdir(-1)
	f.Close()
	if err != nil {
		return nil, err
	}

	names := []string{}
	dirnames := []string{}
	for _, info := range infos {
		if info.IsDir() {
			dirnames = append(dirnames, info.Name())
			continue
		}
		names = append(names, info.Name())
	}
	sort.Strings(names)
	sort.Strings(dirnames)
	return append(names, dirnames...), nil
}

// signatureEntries is a simple helper function that returns a slice of Entry
// values describing the metadata signature of the host. Items like date, user,
// machine, and tree (which is specified by the `root` argument) are considered.
// These entries become comments in the mtree specification, so if there is an
// error obtaining a particular piece of metadata, we simply don't construct
// the Entry.
func signatureEntries(root string) []Entry {
	var sigEntries []Entry
	user, err := user.Current()
	if err == nil {
		userEntry := Entry{
			Type: CommentType,
			Raw:  fmt.Sprintf("#%16s%s", "user: ", user.Username),
		}
		sigEntries = append(sigEntries, userEntry)
	}

	hostname, err := os.Hostname()
	if err == nil {
		hostEntry := Entry{
			Type: CommentType,
			Raw:  fmt.Sprintf("#%16s%s", "machine: ", hostname),
		}
		sigEntries = append(sigEntries, hostEntry)
	}

	if tree := filepath.Clean(root); tree == "." || tree == ".." {
		root, err := os.Getwd()
		if err == nil {
			// use parent directory of current directory
			if tree == ".." {
				root = filepath.Dir(root)
			}
			treeEntry := Entry{
				Type: CommentType,
				Raw:  fmt.Sprintf("#%16s%s", "tree: ", filepath.Clean(root)),
			}
			sigEntries = append(sigEntries, treeEntry)
		}
	} else {
		treeEntry := Entry{
			Type: CommentType,
			Raw:  fmt.Sprintf("#%16s%s", "tree: ", filepath.Clean(root)),
		}
		sigEntries = append(sigEntries, treeEntry)
	}

	dateEntry := Entry{
		Type: CommentType,
		Raw:  fmt.Sprintf("#%16s%s", "date: ", time.Now().Format("Mon Jan 2 15:04:05 2006")),
	}
	sigEntries = append(sigEntries, dateEntry)

	return sigEntries
}
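
// For illustration only (assumed example values, not taken from the original
// source), the signature comments built above render roughly like:
//
//	#          user: joe
//	#       machine: host.example.com
//	#          tree: /home/joe/project
//	#          date: Mon Jan 2 15:04:05 2006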

// keywordEntries returns a slice of entries including a comment of the
// keywords requested when generating this manifest.
func keywordEntries(keywords []Keyword) []Entry {
	// Emit a comment listing the keywords requested for this manifest.
	return []Entry{
		{
			Type: CommentType,
			Raw:  fmt.Sprintf("#%16s%s", "keywords: ", strings.Join(FromKeywords(keywords), ",")),
		},
	}
}