mirror of https://github.com/docker/docs.git
Add ability to exclude files from tar
parent 4d1a537433
commit 2c7f50a77d
@@ -15,7 +15,15 @@ import (
 
 type Archive io.Reader
 
-type Compression uint32
+type Compression int
+
+type TarOptions struct {
+	Includes    []string
+	Excludes    []string
+	Recursive   bool
+	Compression Compression
+	CreateFiles []string
+}
 
 const (
 	Uncompressed Compression = iota
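The hunk above replaces TarFilter's growing positional parameter list with a single TarOptions struct and adds Excludes alongside the existing include/recursion/compression settings. Below is a minimal caller-side sketch of the new API; the import path, paths, and patterns are assumptions for illustration, not part of this commit.

package main

import (
	"io"
	"os"

	"github.com/dotcloud/docker/archive" // assumed import path, not stated in this commit
)

func main() {
	// Hypothetical call: tar up a directory while skipping editor backup files.
	tarStream, err := archive.TarFilter("/var/lib/example", &archive.TarOptions{
		Compression: archive.Uncompressed,
		Excludes:    []string{"*~"},
		Recursive:   true,
	})
	if err != nil {
		panic(err)
	}
	// The archive is returned as an io.Reader; stream it wherever needed.
	if _, err := io.Copy(os.Stdout, tarStream); err != nil {
		panic(err)
	}
}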
@@ -80,7 +88,7 @@ func (compression *Compression) Extension() string {
 // Tar creates an archive from the directory at `path`, and returns it as a
 // stream of bytes.
 func Tar(path string, compression Compression) (io.Reader, error) {
-	return TarFilter(path, compression, nil, true, nil)
+	return TarFilter(path, &TarOptions{Recursive: true, Compression: compression})
 }
 
 func escapeName(name string) string {
@@ -101,25 +109,29 @@ func escapeName(name string) string {
 
 // Tar creates an archive from the directory at `path`, only including files whose relative
 // paths are included in `filter`. If `filter` is nil, then all files are included.
-func TarFilter(path string, compression Compression, filter []string, recursive bool, createFiles []string) (io.Reader, error) {
+func TarFilter(path string, options *TarOptions) (io.Reader, error) {
 	args := []string{"tar", "--numeric-owner", "-f", "-", "-C", path, "-T", "-"}
-	if filter == nil {
-		filter = []string{"."}
+	if options.Includes == nil {
+		options.Includes = []string{"."}
 	}
-	args = append(args, "-c"+compression.Flag())
+	args = append(args, "-c"+options.Compression.Flag())
 
-	if !recursive {
+	for _, exclude := range options.Excludes {
+		args = append(args, fmt.Sprintf("--exclude=%s", exclude))
+	}
+
+	if !options.Recursive {
 		args = append(args, "--no-recursion")
 	}
 
 	files := ""
-	for _, f := range filter {
+	for _, f := range options.Includes {
 		files = files + escapeName(f) + "\n"
 	}
 
 	tmpDir := ""
 
-	if createFiles != nil {
+	if options.CreateFiles != nil {
 		var err error // Can't use := here or we override the outer tmpDir
 		tmpDir, err = ioutil.TempDir("", "docker-tar")
 		if err != nil {
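Each entry in Excludes becomes a --exclude=<pattern> flag on the tar command line, ahead of the recursion and include handling. Below is a standalone sketch of the resulting argument vector; buildTarArgs is a hypothetical helper written for illustration, not code from this commit.

package main

import "fmt"

// buildTarArgs mirrors how TarFilter assembles the tar invocation:
// one --exclude flag per pattern, plus --no-recursion when recursion is off.
// The compression suffix on -c (e.g. "z") is omitted in this sketch.
func buildTarArgs(path string, excludes []string, recursive bool) []string {
	args := []string{"tar", "--numeric-owner", "-f", "-", "-C", path, "-T", "-", "-c"}
	for _, exclude := range excludes {
		args = append(args, fmt.Sprintf("--exclude=%s", exclude))
	}
	if !recursive {
		args = append(args, "--no-recursion")
	}
	return args
}

func main() {
	fmt.Println(buildTarArgs("/tmp/layer", []string{".wh*"}, true))
	// Output: [tar --numeric-owner -f - -C /tmp/layer -T - -c --exclude=.wh*]
}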
@@ -127,7 +139,7 @@ func TarFilter(path string, compression Compression, filter []string, recursive
 	}
 
 	files = files + "-C" + tmpDir + "\n"
-	for _, f := range createFiles {
+	for _, f := range options.CreateFiles {
 		path := filepath.Join(tmpDir, f)
 		err := os.MkdirAll(filepath.Dir(path), 0600)
 		if err != nil {
@@ -194,7 +206,7 @@ func Untar(archive io.Reader, path string) error {
 // TarUntar aborts and returns the error.
 func TarUntar(src string, filter []string, dst string) error {
 	utils.Debugf("TarUntar(%s %s %s)", src, filter, dst)
-	archive, err := TarFilter(src, Uncompressed, filter, true, nil)
+	archive, err := TarFilter(src, &TarOptions{Compression: Uncompressed, Includes: filter, Recursive: true})
 	if err != nil {
 		return err
 	}
@@ -207,24 +207,22 @@ func ChangesDirs(newDir, oldDir string) ([]Change, error) {
 	return changes, nil
 }
 
 func ExportChanges(root, rw string) (Archive, error) {
 	changes, err := ChangesDirs(root, rw)
 	if err != nil {
 		return nil, err
 	}
 	files := make([]string, 0)
 	deletions := make([]string, 0)
 	for _, change := range changes {
 		if change.Kind == ChangeModify || change.Kind == ChangeAdd {
 			files = append(files, change.Path)
 		}
 		if change.Kind == ChangeDelete {
 			base := filepath.Base(change.Path)
 			dir := filepath.Dir(change.Path)
 			deletions = append(deletions, filepath.Join(dir, ".wh."+base))
 		}
 	}
-	return TarFilter(root, Uncompressed, files, false, deletions)
+	return TarFilter(root, &TarOptions{Compression: Uncompressed, Recursive: false, Includes: files, CreateFiles: deletions})
 }
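ExportChanges represents deletions as AUFS-style whiteout entries: for every deleted path it asks TarFilter (via CreateFiles) to create an empty .wh.<name> marker in the exported layer. A small standalone sketch of that naming rule, mirroring the filepath calls in the hunk above:

package main

import (
	"fmt"
	"path/filepath"
)

// whiteoutPath reproduces the naming used in ExportChanges: a deleted file
// /a/b/c is recorded in the layer tar as the empty marker /a/b/.wh.c.
func whiteoutPath(deleted string) string {
	base := filepath.Base(deleted)
	dir := filepath.Dir(deleted)
	return filepath.Join(dir, ".wh."+base)
}

func main() {
	fmt.Println(whiteoutPath("/etc/removed.conf")) // /etc/.wh.removed.conf
}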
aufs/aufs.go | 16
@@ -137,8 +137,7 @@ func (a *AufsDriver) createDirsFor(id string) error {
 	}
 
 	for _, p := range paths {
-		dir := path.Join(a.rootPath(), p, id)
-		if err := os.MkdirAll(dir, 0755); err != nil {
+		if err := os.MkdirAll(path.Join(a.rootPath(), p, id), 0755); err != nil {
 			return err
 		}
 	}
@@ -201,11 +200,14 @@ func (a *AufsDriver) Get(id string) (string, error) {
 
 // Returns an archive of the contents for the id
 func (a *AufsDriver) Diff(id string) (archive.Archive, error) {
-	p, err := a.Get(id)
-	if err != nil {
-		return nil, err
-	}
-	return archive.Tar(p, archive.Uncompressed)
+	// Exclude top level aufs metadata from the diff
+	return archive.TarFilter(
+		path.Join(a.rootPath(), "diff", id),
+		&archive.TarOptions{
+			Excludes:    []string{".wh*"},
+			Recursive:   true,
+			Compression: archive.Uncompressed,
+		})
 }
 
 // Returns the size of the contents for the id
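Diff now tars the layer's diff directory directly and relies on the new Excludes option so that AUFS whiteout and bookkeeping entries (anything named .wh*) do not leak into exported layer archives. The snippet below only illustrates what the .wh* glob covers; filepath.Match is a stand-in for demonstration, since tar evaluates --exclude patterns itself.

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// ".wh*" catches plain whiteouts (.wh.foo) as well as .wh..wh.* metadata names.
	for _, name := range []string{".wh.deleted-file", ".wh..wh.aufs", "regular-file"} {
		matched, _ := filepath.Match(".wh*", name)
		fmt.Printf("%-20s excluded=%v\n", name, matched)
	}
}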
@@ -1527,7 +1527,11 @@ func (container *Container) Copy(resource string) (archive.Archive, error) {
 		filter = []string{path.Base(basePath)}
 		basePath = path.Dir(basePath)
 	}
-	return archive.TarFilter(basePath, archive.Uncompressed, filter, true, nil)
+	return archive.TarFilter(basePath, &archive.TarOptions{
+		Compression: archive.Uncompressed,
+		Includes:    filter,
+		Recursive:   true,
+	})
 }
 
 // Returns true if the container exposes a certain port
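Container.Copy keeps its single-file behaviour by tarring the parent directory with an Includes filter of just the file name (the filter/basePath handling in the context lines above). A tiny standalone sketch of that split, using a made-up resource path:

package main

import (
	"fmt"
	"path"
)

func main() {
	basePath := "/var/lib/app/config.json"  // hypothetical resource inside the container
	filter := []string{path.Base(basePath)} // ["config.json"]
	basePath = path.Dir(basePath)           // "/var/lib/app"
	fmt.Println(basePath, filter)
}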