mirror of https://github.com/docker/docs.git
Merge pull request #3353 from creack/improve_add_cache
Improve add cache
commit 194eb246ef
buildfile.go (132 lines changed)
@@ -1,7 +1,6 @@
 package docker
 
 import (
-	"archive/tar"
 	"crypto/sha256"
 	"encoding/hex"
 	"encoding/json"
@@ -18,8 +17,8 @@ import (
 	"path/filepath"
 	"reflect"
 	"regexp"
+	"sort"
 	"strings"
-	"time"
 )
 
 var (
@@ -36,10 +35,13 @@ type buildFile struct {
 	runtime *Runtime
 	srv     *Server
 
 	image      string
 	maintainer string
 	config     *Config
-	context    string
+
+	contextPath string
+	context     *utils.TarSum
+
 	verbose      bool
 	utilizeCache bool
 	rm           bool
@@ -118,66 +120,6 @@ func (b *buildFile) probeCache() (bool, error) {
 	return false, nil
 }
 
-// hashPath calculates a strong hash (sha256) value for a file tree located
-// at `basepth`/`pth`, including all attributes that would normally be
-// captured by `tar`. The path to hash is passed in two pieces only to
-// permit logging the second piece in isolation, assuming the first is a
-// temporary directory in which docker is running. If `clobberTimes` is
-// true and hashPath is applied to a single file, the ctime/atime/mtime of
-// the file is considered to be unix time 0, for purposes of hashing.
-func (b *buildFile) hashPath(basePth, pth string, clobberTimes bool) (string, error) {
-
-	p := path.Join(basePth, pth)
-
-	st, err := os.Stat(p)
-	if err != nil {
-		return "", err
-	}
-
-	h := sha256.New()
-
-	if st.IsDir() {
-		tarRd, err := archive.Tar(p, archive.Uncompressed)
-		if err != nil {
-			return "", err
-		}
-		_, err = io.Copy(h, tarRd)
-		if err != nil {
-			return "", err
-		}
-
-	} else {
-		hdr, err := tar.FileInfoHeader(st, "")
-		if err != nil {
-			return "", err
-		}
-		if clobberTimes {
-			hdr.AccessTime = time.Unix(0, 0)
-			hdr.ChangeTime = time.Unix(0, 0)
-			hdr.ModTime = time.Unix(0, 0)
-		}
-		hdr.Name = filepath.Base(p)
-		tarWr := tar.NewWriter(h)
-		if err := tarWr.WriteHeader(hdr); err != nil {
-			return "", err
-		}
-
-		fileRd, err := os.Open(p)
-		if err != nil {
-			return "", err
-		}
-
-		if _, err = io.Copy(tarWr, fileRd); err != nil {
-			return "", err
-		}
-		tarWr.Close()
-	}
-
-	hstr := hex.EncodeToString(h.Sum(nil))
-	fmt.Fprintf(b.outStream, " ---> data at %s has sha256 %.12s...\n", pth, hstr)
-	return hstr, nil
-}
-
 func (b *buildFile) CmdRun(args string) error {
 	if b.image == "" {
 		return fmt.Errorf("Please provide a source image with `from` prior to run")
@@ -347,8 +289,8 @@ func (b *buildFile) CmdVolume(args string) error {
 }
 
 func (b *buildFile) checkPathForAddition(orig string) error {
-	origPath := path.Join(b.context, orig)
-	if !strings.HasPrefix(origPath, b.context) {
+	origPath := path.Join(b.contextPath, orig)
+	if !strings.HasPrefix(origPath, b.contextPath) {
 		return fmt.Errorf("Forbidden path outside the build context: %s (%s)", orig, origPath)
 	}
 	_, err := os.Stat(origPath)
@@ -359,8 +301,10 @@ func (b *buildFile) checkPathForAddition(orig string) error {
 }
 
 func (b *buildFile) addContext(container *Container, orig, dest string) error {
-	origPath := path.Join(b.context, orig)
-	destPath := path.Join(container.RootfsPath(), dest)
+	var (
+		origPath = path.Join(b.contextPath, orig)
+		destPath = path.Join(container.RootfsPath(), dest)
+	)
 	// Preserve the trailing '/'
 	if strings.HasSuffix(dest, "/") {
 		destPath = destPath + "/"
@@ -388,7 +332,7 @@ func (b *buildFile) addContext(container *Container, orig, dest string) error {
 }
 
 func (b *buildFile) CmdAdd(args string) error {
-	if b.context == "" {
+	if b.context == nil {
 		return fmt.Errorf("No context given. Impossible to use ADD")
 	}
 	tmp := strings.SplitN(args, " ", 2)
@@ -408,22 +352,20 @@ func (b *buildFile) CmdAdd(args string) error {
 
 	cmd := b.config.Cmd
 	b.config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) ADD %s in %s", orig, dest)}
 
 	b.config.Image = b.image
 
-	origPath := orig
-	destPath := dest
-	clobberTimes := false
+	// FIXME: do we really need this?
+	var (
+		origPath = orig
+		destPath = dest
+	)
 
 	if utils.IsURL(orig) {
-
-		clobberTimes = true
-
 		resp, err := utils.Download(orig)
 		if err != nil {
 			return err
 		}
-		tmpDirName, err := ioutil.TempDir(b.context, "docker-remote")
+		tmpDirName, err := ioutil.TempDir(b.contextPath, "docker-remote")
 		if err != nil {
 			return err
 		}
@@ -464,9 +406,25 @@ func (b *buildFile) CmdAdd(args string) error {
 
 	// Hash path and check the cache
 	if b.utilizeCache {
-		hash, err := b.hashPath(b.context, origPath, clobberTimes)
-		if err != nil {
+		var (
+			hash string
+			sums = b.context.GetSums()
+		)
+		if fi, err := os.Stat(path.Join(b.contextPath, origPath)); err != nil {
 			return err
+		} else if fi.IsDir() {
+			var subfiles []string
+			for file, sum := range sums {
+				if strings.HasPrefix(file, origPath) {
+					subfiles = append(subfiles, sum)
+				}
+			}
+			sort.Strings(subfiles)
+			hasher := sha256.New()
+			hasher.Write([]byte(strings.Join(subfiles, ",")))
+			hash = "dir:" + hex.EncodeToString(hasher.Sum(nil))
+		} else {
+			hash = "file:" + sums[origPath]
 		}
 		b.config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) ADD %s in %s", hash, dest)}
 		hit, err := b.probeCache()
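For readers following the ADD caching change, the cache key derived in the hunk above can be reproduced in isolation. The sketch below is a minimal, standalone restatement of that logic, not docker's actual code path: the sums map and the addCacheKey helper are stand-ins for what b.context.GetSums() and the inline CmdAdd code do, and the checksums in main are made up.

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"sort"
	"strings"
)

// addCacheKey mirrors the hunk above: for a directory, join the sorted
// checksums of every context file under origPath and hash the result; for a
// single file, reuse its tarsum entry directly.
func addCacheKey(sums map[string]string, origPath string, isDir bool) string {
	if !isDir {
		return "file:" + sums[origPath]
	}
	var subfiles []string
	for file, sum := range sums {
		if strings.HasPrefix(file, origPath) {
			subfiles = append(subfiles, sum)
		}
	}
	sort.Strings(subfiles)
	hasher := sha256.New()
	hasher.Write([]byte(strings.Join(subfiles, ",")))
	return "dir:" + hex.EncodeToString(hasher.Sum(nil))
}

func main() {
	// Hypothetical per-file sums, as TarSum.GetSums() would report them.
	sums := map[string]string{
		"app/main.go": "9aabf3",
		"app/go.mod":  "41c0d2",
	}
	fmt.Println(addCacheKey(sums, "app", true))
	fmt.Println(addCacheKey(sums, "app/main.go", false))
}

Because the per-file sums are joined in sorted order, the directory key is stable across builds of the same context, which is what lets the subsequent probeCache call find a matching image.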
@@ -635,17 +593,17 @@ func (b *buildFile) commit(id string, autoCmd []string, comment string) error {
 var lineContinuation = regexp.MustCompile(`\s*\\\s*\n`)
 
 func (b *buildFile) Build(context io.Reader) (string, error) {
-	// FIXME: @creack "name" is a terrible variable name
-	name, err := ioutil.TempDir("", "docker-build")
+	tmpdirPath, err := ioutil.TempDir("", "docker-build")
 	if err != nil {
 		return "", err
 	}
-	if err := archive.Untar(context, name, nil); err != nil {
+	b.context = &utils.TarSum{Reader: context}
+	if err := archive.Untar(b.context, tmpdirPath, nil); err != nil {
 		return "", err
 	}
-	defer os.RemoveAll(name)
-	b.context = name
-	filename := path.Join(name, "Dockerfile")
+	defer os.RemoveAll(tmpdirPath)
+	b.contextPath = tmpdirPath
+	filename := path.Join(tmpdirPath, "Dockerfile")
 	if _, err := os.Stat(filename); os.IsNotExist(err) {
 		return "", fmt.Errorf("Can't build a directory with no Dockerfile")
 	}
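The Build change wraps the incoming context reader in utils.TarSum before untarring, so the checksums are computed during the single pass that archive.Untar already makes over the stream. Here is a rough, self-contained sketch of that pass-through pattern; hashingReader is a hypothetical stand-in for TarSum, which additionally parses tar entries and keeps one sum per file.

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"hash"
	"io"
	"strings"
)

// hashingReader relays bytes unchanged while folding them into a hash, the
// same pass-through idea Build relies on: the untar step consumes b.context,
// and the checksums fall out as a side effect of that single read.
type hashingReader struct {
	r io.Reader
	h hash.Hash
}

func (hr *hashingReader) Read(p []byte) (int, error) {
	n, err := hr.r.Read(p)
	hr.h.Write(p[:n])
	return n, err
}

func main() {
	context := strings.NewReader("pretend this is the build context tar stream")
	hr := &hashingReader{r: context, h: sha256.New()}

	// Stand-in for archive.Untar(b.context, tmpdirPath, nil): just drain it.
	if _, err := io.Copy(io.Discard, hr); err != nil {
		panic(err)
	}
	fmt.Println(hex.EncodeToString(hr.h.Sum(nil)))
}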
server.go (12 lines changed)
@@ -21,6 +21,7 @@ import (
 	"path"
 	"path/filepath"
 	"runtime"
+	"sort"
 	"strconv"
 	"strings"
 	"sync"
@@ -1695,16 +1696,13 @@ func (srv *Server) ImageGetCached(imgID string, config *Config) (*Image, error)
 	}
 
 	// Store the tree in a map of map (map[parentId][childId])
-	imageMap := make(map[string]map[string]struct{})
+	imageMap := make(map[string][]string)
 	for _, img := range images {
-		if _, exists := imageMap[img.Parent]; !exists {
-			imageMap[img.Parent] = make(map[string]struct{})
-		}
-		imageMap[img.Parent][img.ID] = struct{}{}
+		imageMap[img.Parent] = append(imageMap[img.Parent], img.ID)
 	}
+	sort.Strings(imageMap[imgID])
 	// Loop on the children of the given image and check the config
-	for elem := range imageMap[imgID] {
+	for _, elem := range imageMap[imgID] {
 		img, err := srv.runtime.graph.Get(elem)
 		if err != nil {
 			return nil, err
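In server.go, replacing the nested-map set with map[string][]string lets ImageGetCached sort a parent's children and probe them in a stable order; ranging over a Go map is unordered, so the old version could pick a different cached child on every run. A small self-contained illustration of that pattern, with made-up image IDs:

package main

import (
	"fmt"
	"sort"
)

type image struct{ ID, Parent string }

func main() {
	images := []image{
		{ID: "c1", Parent: "base"},
		{ID: "a9", Parent: "base"},
		{ID: "d2", Parent: "other"},
	}

	// Parent -> children as a slice instead of a map-based set, so the
	// candidates can be sorted and walked in a deterministic order.
	imageMap := make(map[string][]string)
	for _, img := range images {
		imageMap[img.Parent] = append(imageMap[img.Parent], img.ID)
	}
	sort.Strings(imageMap["base"])
	fmt.Println(imageMap["base"]) // [a9 c1] on every run
}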
package utils (file name not shown in this view)

@@ -1,38 +1,30 @@
 package utils
 
 import (
+	"archive/tar"
 	"bytes"
 	"compress/gzip"
 	"crypto/sha256"
 	"encoding/hex"
-	"archive/tar"
 	"hash"
 	"io"
 	"sort"
 	"strconv"
+	"strings"
 )
 
-type verboseHash struct {
-	hash.Hash
-}
-
-func (h verboseHash) Write(buf []byte) (int, error) {
-	Debugf("--->%s<---", buf)
-	return h.Hash.Write(buf)
-}
-
 type TarSum struct {
 	io.Reader
 	tarR   *tar.Reader
 	tarW   *tar.Writer
 	gz     *gzip.Writer
 	bufTar *bytes.Buffer
 	bufGz  *bytes.Buffer
 	h      hash.Hash
-	h2     verboseHash
-	sums   []string
+	sums        map[string]string
+	currentFile string
 	finished bool
 	first    bool
 }
 
 func (ts *TarSum) encodeHeader(h *tar.Header) error {
@@ -52,7 +44,6 @@ func (ts *TarSum) encodeHeader(h *tar.Header) error {
 		// {"atime", strconv.Itoa(int(h.AccessTime.UTC().Unix()))},
 		// {"ctime", strconv.Itoa(int(h.ChangeTime.UTC().Unix()))},
 	} {
-		// Debugf("-->%s<-- -->%s<--", elem[0], elem[1])
 		if _, err := ts.h.Write([]byte(elem[0] + elem[1])); err != nil {
 			return err
 		}
@@ -68,9 +59,9 @@ func (ts *TarSum) Read(buf []byte) (int, error) {
 		ts.tarW = tar.NewWriter(ts.bufTar)
 		ts.gz = gzip.NewWriter(ts.bufGz)
 		ts.h = sha256.New()
-		// ts.h = verboseHash{sha256.New()}
 		ts.h.Reset()
 		ts.first = true
+		ts.sums = make(map[string]string)
 	}
 
 	if ts.finished {
@@ -85,7 +76,7 @@ func (ts *TarSum) Read(buf []byte) (int, error) {
 				return 0, err
 			}
 			if !ts.first {
-				ts.sums = append(ts.sums, hex.EncodeToString(ts.h.Sum(nil)))
+				ts.sums[ts.currentFile] = hex.EncodeToString(ts.h.Sum(nil))
 				ts.h.Reset()
 			} else {
 				ts.first = false
@@ -102,6 +93,7 @@ func (ts *TarSum) Read(buf []byte) (int, error) {
 				}
 				return n, err
 			}
+			ts.currentFile = strings.TrimSuffix(strings.TrimPrefix(currentHeader.Name, "./"), "/")
 			if err := ts.encodeHeader(currentHeader); err != nil {
 				return 0, err
 			}
@@ -143,12 +135,17 @@ func (ts *TarSum) Read(buf []byte) (int, error) {
 }
 
 func (ts *TarSum) Sum(extra []byte) string {
-	sort.Strings(ts.sums)
+	var sums []string
+
+	for _, sum := range ts.sums {
+		sums = append(sums, sum)
+	}
+	sort.Strings(sums)
 	h := sha256.New()
 	if extra != nil {
 		h.Write(extra)
 	}
-	for _, sum := range ts.sums {
+	for _, sum := range sums {
 		Debugf("-->%s<--", sum)
 		h.Write([]byte(sum))
 	}
@@ -156,3 +153,7 @@ func (ts *TarSum) Sum(extra []byte) string {
 	Debugf("checksum processed: %s", checksum)
 	return checksum
 }
+
+func (ts *TarSum) GetSums() map[string]string {
+	return ts.sums
+}
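To make the new per-file checksum map concrete, here is a minimal, self-contained sketch of what GetSums exposes: one checksum per tar entry, keyed by the entry name with a leading "./" and trailing "/" trimmed, as in the Read change above. It is deliberately simplified; the real TarSum also folds selected header fields into each file's hash and keeps relaying the stream to its caller.

package main

import (
	"archive/tar"
	"bytes"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

// fileSums walks a tar stream and records one checksum per entry, keyed the
// same way CmdAdd looks files up in the sums map.
func fileSums(r io.Reader) (map[string]string, error) {
	sums := make(map[string]string)
	tr := tar.NewReader(r)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			return sums, nil
		}
		if err != nil {
			return nil, err
		}
		h := sha256.New()
		if _, err := io.Copy(h, tr); err != nil {
			return nil, err
		}
		name := strings.TrimSuffix(strings.TrimPrefix(hdr.Name, "./"), "/")
		sums[name] = hex.EncodeToString(h.Sum(nil))
	}
}

func main() {
	// Build a tiny in-memory tar archive to feed the sketch.
	var buf bytes.Buffer
	tw := tar.NewWriter(&buf)
	body := []byte("FROM scratch\n")
	tw.WriteHeader(&tar.Header{Name: "./Dockerfile", Mode: 0644, Size: int64(len(body))})
	tw.Write(body)
	tw.Close()

	sums, err := fileSums(&buf)
	if err != nil {
		panic(err)
	}
	fmt.Println(sums["Dockerfile"])
}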