diff --git a/containers.go b/containers.go
index 086cb8e53..5631e31c3 100644
--- a/containers.go
+++ b/containers.go
@@ -10,7 +10,7 @@ import (
 	"github.com/containers/storage/pkg/ioutils"
 	"github.com/containers/storage/pkg/stringid"
 	"github.com/containers/storage/pkg/truncindex"
-
+	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
 
@@ -46,6 +46,10 @@ type Container struct {
 	// that has been stored, if they're known.
 	BigDataSizes map[string]int64 `json:"big-data-sizes,omitempty"`
 
+	// BigDataDigests maps the names in BigDataNames to the digests of the
+	// data that has been stored, if they're known.
+	BigDataDigests map[string]digest.Digest `json:"big-data-digests,omitempty"`
+
 	// Created is the datestamp for when this container was created. Older
 	// versions of the library did not track this information, so callers
 	// will likely want to use the IsZero() method to verify that a value
@@ -253,15 +257,16 @@ func (r *containerStore) Create(id string, names []string, image, layer, metadat
 	}
 	if err == nil {
 		container = &Container{
-			ID:           id,
-			Names:        names,
-			ImageID:      image,
-			LayerID:      layer,
-			Metadata:     metadata,
-			BigDataNames: []string{},
-			BigDataSizes: make(map[string]int64),
-			Created:      time.Now().UTC(),
-			Flags:        make(map[string]interface{}),
+			ID:             id,
+			Names:          names,
+			ImageID:        image,
+			LayerID:        layer,
+			Metadata:       metadata,
+			BigDataNames:   []string{},
+			BigDataSizes:   make(map[string]int64),
+			BigDataDigests: make(map[string]digest.Digest),
+			Created:        time.Now().UTC(),
+			Flags:          make(map[string]interface{}),
 		}
 		r.containers = append(r.containers, container)
 		r.byid[id] = container
@@ -369,7 +374,7 @@ func (r *containerStore) Exists(id string) bool {
 
 func (r *containerStore) BigData(id, key string) ([]byte, error) {
 	if key == "" {
-		return nil, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return nil, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve container big data value for empty name")
 	}
 	c, ok := r.lookup(id)
 	if !ok {
@@ -380,7 +385,7 @@ func (r *containerStore) BigDataSize(id, key string) (int64, error) {
 	if key == "" {
-		return -1, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return -1, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve size of container big data with empty name")
 	}
 	c, ok := r.lookup(id)
 	if !ok {
@@ -392,9 +397,48 @@ func (r *containerStore) BigDataSize(id, key string) (int64, error) {
 	if size, ok := c.BigDataSizes[key]; ok {
 		return size, nil
 	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			c, ok := r.lookup(id)
+			if !ok {
+				return -1, ErrContainerUnknown
+			}
+			if size, ok := c.BigDataSizes[key]; ok {
+				return size, nil
+			}
+		}
+	}
 	return -1, ErrSizeUnknown
 }
 
+func (r *containerStore) BigDataDigest(id, key string) (digest.Digest, error) {
+	if key == "" {
+		return "", errors.Wrapf(ErrInvalidBigDataName, "can't retrieve digest of container big data value with empty name")
+	}
+	c, ok := r.lookup(id)
+	if !ok {
+		return "", ErrContainerUnknown
+	}
+	if c.BigDataDigests == nil {
+		c.BigDataDigests = make(map[string]digest.Digest)
+	}
+	if d, ok := c.BigDataDigests[key]; ok {
+		return d, nil
+	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			c, ok := r.lookup(id)
+			if !ok {
+				return "", ErrContainerUnknown
+			}
+			if d, ok := c.BigDataDigests[key]; ok {
+				return d, nil
+			}
+		}
+	}
+	return "", ErrDigestUnknown
+}
+
 func (r *containerStore) BigDataNames(id string) ([]string, error) {
 	c, ok := r.lookup(id)
 	if !ok {
@@ -405,7 +449,7 @@ func (r *containerStore) BigDataNames(id string) ([]string, error) {
 
 func (r *containerStore) SetBigData(id, key string, data []byte) error {
 	if key == "" {
-		return errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return errors.Wrapf(ErrInvalidBigDataName, "can't set empty name for container big data item")
 	}
 	c, ok := r.lookup(id)
 	if !ok {
@@ -422,17 +466,23 @@ func (r *containerStore) SetBigData(id, key string, data []byte) error {
 		}
 		oldSize, sizeOk := c.BigDataSizes[key]
 		c.BigDataSizes[key] = int64(len(data))
-		if !sizeOk || oldSize != c.BigDataSizes[key] {
+		if c.BigDataDigests == nil {
+			c.BigDataDigests = make(map[string]digest.Digest)
+		}
+		oldDigest, digestOk := c.BigDataDigests[key]
+		newDigest := digest.Canonical.FromBytes(data)
+		c.BigDataDigests[key] = newDigest
+		if !sizeOk || oldSize != c.BigDataSizes[key] || !digestOk || oldDigest != newDigest {
 			save = true
 		}
-		add := true
+		addName := true
 		for _, name := range c.BigDataNames {
 			if name == key {
-				add = false
+				addName = false
 				break
 			}
 		}
-		if add {
+		if addName {
 			c.BigDataNames = append(c.BigDataNames, key)
 			save = true
 		}
diff --git a/errors.go b/errors.go
index 21590ef93..bed6f8cdc 100644
--- a/errors.go
+++ b/errors.go
@@ -51,4 +51,6 @@ var (
 	ErrDuplicateLayerNames = errors.New("read-only layer store assigns the same name to multiple layers")
 	// ErrInvalidBigDataName indicates that the name for a big data item is not acceptable; it may be empty.
 	ErrInvalidBigDataName = errors.New("not a valid name for a big data item")
+	// ErrDigestUnknown indicates that we were unable to compute the digest of a specified item.
+	ErrDigestUnknown = errors.New("could not compute digest of item")
 )
diff --git a/images.go b/images.go
index 25a007f6c..ed22e131f 100644
--- a/images.go
+++ b/images.go
@@ -10,6 +10,7 @@ import (
 	"github.com/containers/storage/pkg/ioutils"
 	"github.com/containers/storage/pkg/stringid"
 	"github.com/containers/storage/pkg/truncindex"
+	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
 
@@ -42,6 +43,10 @@ type Image struct {
 	// that has been stored, if they're known.
 	BigDataSizes map[string]int64 `json:"big-data-sizes,omitempty"`
 
+	// BigDataDigests maps the names in BigDataNames to the digests of the
+	// data that has been stored, if they're known.
+	BigDataDigests map[string]digest.Digest `json:"big-data-digests,omitempty"`
+
 	// Created is the datestamp for when this image was created. Older
 	// versions of the library did not track this information, so callers
 	// will likely want to use the IsZero() method to verify that a value
@@ -286,14 +291,15 @@ func (r *imageStore) Create(id string, names []string, layer, metadata string, c
 	}
 	if err == nil {
 		image = &Image{
-			ID:           id,
-			Names:        names,
-			TopLayer:     layer,
-			Metadata:     metadata,
-			BigDataNames: []string{},
-			BigDataSizes: make(map[string]int64),
-			Created:      created,
-			Flags:        make(map[string]interface{}),
+			ID:             id,
+			Names:          names,
+			TopLayer:       layer,
+			Metadata:       metadata,
+			BigDataNames:   []string{},
+			BigDataSizes:   make(map[string]int64),
+			BigDataDigests: make(map[string]digest.Digest),
+			Created:        created,
+			Flags:          make(map[string]interface{}),
 		}
 		r.images = append(r.images, image)
 		r.idindex.Add(id)
@@ -407,7 +413,7 @@ func (r *imageStore) Exists(id string) bool {
 
 func (r *imageStore) BigData(id, key string) ([]byte, error) {
 	if key == "" {
-		return nil, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return nil, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve image big data value for empty name")
 	}
 	image, ok := r.lookup(id)
 	if !ok {
@@ -418,7 +424,7 @@ func (r *imageStore) BigData(id, key string) ([]byte, error) {
 
 func (r *imageStore) BigDataSize(id, key string) (int64, error) {
 	if key == "" {
-		return -1, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return -1, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve size of image big data with empty name")
 	}
 	image, ok := r.lookup(id)
 	if !ok {
@@ -430,9 +436,48 @@ func (r *imageStore) BigDataSize(id, key string) (int64, error) {
 	if size, ok := image.BigDataSizes[key]; ok {
 		return size, nil
 	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			image, ok := r.lookup(id)
+			if !ok {
+				return -1, ErrImageUnknown
+			}
+			if size, ok := image.BigDataSizes[key]; ok {
+				return size, nil
+			}
+		}
+	}
 	return -1, ErrSizeUnknown
 }
 
+func (r *imageStore) BigDataDigest(id, key string) (digest.Digest, error) {
+	if key == "" {
+		return "", errors.Wrapf(ErrInvalidBigDataName, "can't retrieve digest of image big data value with empty name")
+	}
+	image, ok := r.lookup(id)
+	if !ok {
+		return "", ErrImageUnknown
+	}
+	if image.BigDataDigests == nil {
+		image.BigDataDigests = make(map[string]digest.Digest)
+	}
+	if d, ok := image.BigDataDigests[key]; ok {
+		return d, nil
+	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			image, ok := r.lookup(id)
+			if !ok {
+				return "", ErrImageUnknown
+			}
+			if d, ok := image.BigDataDigests[key]; ok {
+				return d, nil
+			}
+		}
+	}
+	return "", ErrDigestUnknown
+}
+
 func (r *imageStore) BigDataNames(id string) ([]string, error) {
 	image, ok := r.lookup(id)
 	if !ok {
@@ -442,6 +487,9 @@ func (r *imageStore) BigDataNames(id string) ([]string, error) {
 }
 
 func (r *imageStore) SetBigData(id, key string, data []byte) error {
+	if key == "" {
+		return errors.Wrapf(ErrInvalidBigDataName, "can't set empty name for image big data item")
+	}
 	if !r.IsReadWrite() {
 		return errors.Wrapf(ErrStoreIsReadOnly, "not allowed to save data items associated with images at %q", r.imagespath())
 	}
@@ -454,23 +502,29 @@ func (r *imageStore) SetBigData(id, key string, data []byte) error {
 	}
 	err := ioutils.AtomicWriteFile(r.datapath(image.ID, key), data, 0600)
 	if err == nil {
-		add := true
 		save := false
 		if image.BigDataSizes == nil {
 			image.BigDataSizes = make(map[string]int64)
 		}
 		oldSize, sizeOk := image.BigDataSizes[key]
 		image.BigDataSizes[key] = int64(len(data))
-		if !sizeOk || oldSize != image.BigDataSizes[key] {
+		if image.BigDataDigests == nil {
+			image.BigDataDigests = make(map[string]digest.Digest)
+		}
+		oldDigest, digestOk := image.BigDataDigests[key]
+		newDigest := digest.Canonical.FromBytes(data)
+		image.BigDataDigests[key] = newDigest
+		if !sizeOk || oldSize != image.BigDataSizes[key] || !digestOk || oldDigest != newDigest {
 			save = true
 		}
+		addName := true
 		for _, name := range image.BigDataNames {
 			if name == key {
-				add = false
+				addName = false
 				break
 			}
 		}
-		if add {
+		if addName {
 			image.BigDataNames = append(image.BigDataNames, key)
 			save = true
 		}
diff --git a/store.go b/store.go
index fe39d8750..e97bff7c8 100644
--- a/store.go
+++ b/store.go
@@ -20,7 +20,7 @@ import (
 	"github.com/containers/storage/pkg/idtools"
 	"github.com/containers/storage/pkg/ioutils"
 	"github.com/containers/storage/pkg/stringid"
-	"github.com/opencontainers/go-digest"
+	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
 
@@ -87,6 +87,10 @@ type ROBigDataStore interface {
 	// data associated with this ID, if it has previously been set.
 	BigDataSize(id, key string) (int64, error)
 
+	// BigDataDigest retrieves the digest of a (potentially large) piece of
+	// data associated with this ID, if it has previously been set.
+	BigDataDigest(id, key string) (digest.Digest, error)
+
 	// BigDataNames() returns a list of the names of previously-stored pieces of
 	// data.
 	BigDataNames(id string) ([]string, error)
@@ -327,6 +331,10 @@ type Store interface {
 	// of named data associated with an image.
 	ImageBigDataSize(id, key string) (int64, error)
 
+	// ImageBigDataDigest retrieves the digest of a (possibly large) chunk
+	// of named data associated with an image.
+	ImageBigDataDigest(id, key string) (digest.Digest, error)
+
 	// SetImageBigData stores a (possibly large) chunk of named data associated
 	// with an image.
 	SetImageBigData(id, key string, data []byte) error
@@ -343,6 +351,10 @@ type Store interface {
 	// chunk of named data associated with a container.
 	ContainerBigDataSize(id, key string) (int64, error)
 
+	// ContainerBigDataDigest retrieves the digest of a (possibly large)
+	// chunk of named data associated with a container.
+	ContainerBigDataDigest(id, key string) (digest.Digest, error)
+
 	// SetContainerBigData stores a (possibly large) chunk of named data
 	// associated with a container.
 	SetContainerBigData(id, key string, data []byte) error
@@ -1026,6 +1038,30 @@ func (s *store) ImageBigDataSize(id, key string) (int64, error) {
 	return -1, ErrSizeUnknown
 }
 
+func (s *store) ImageBigDataDigest(id, key string) (digest.Digest, error) {
+	ristore, err := s.ImageStore()
+	if err != nil {
+		return "", err
+	}
+	stores, err := s.ROImageStores()
+	if err != nil {
+		return "", err
+	}
+	stores = append([]ROImageStore{ristore}, stores...)
+	for _, ristore := range stores {
+		ristore.Lock()
+		defer ristore.Unlock()
+		if modified, err := ristore.Modified(); modified || err != nil {
+			ristore.Load()
+		}
+		d, err := ristore.BigDataDigest(id, key)
+		if err == nil && d.Validate() == nil {
+			return d, nil
+		}
+	}
+	return "", ErrDigestUnknown
+}
+
 func (s *store) ImageBigData(id, key string) ([]byte, error) {
 	istore, err := s.ImageStore()
 	if err != nil {
@@ -1089,10 +1125,22 @@ func (s *store) ContainerBigDataSize(id, key string) (int64, error) {
 	if modified, err := rcstore.Modified(); modified || err != nil {
 		rcstore.Load()
 	}
-
 	return rcstore.BigDataSize(id, key)
 }
 
+func (s *store) ContainerBigDataDigest(id, key string) (digest.Digest, error) {
+	rcstore, err := s.ContainerStore()
+	if err != nil {
+		return "", err
+	}
+	rcstore.Lock()
+	defer rcstore.Unlock()
+	if modified, err := rcstore.Modified(); modified || err != nil {
+		rcstore.Load()
+	}
+	return rcstore.BigDataDigest(id, key)
+}
+
 func (s *store) ContainerBigData(id, key string) ([]byte, error) {
 	rcstore, err := s.ContainerStore()
 	if err != nil {
@@ -1103,7 +1151,6 @@ func (s *store) ContainerBigData(id, key string) ([]byte, error) {
 	if modified, err := rcstore.Modified(); modified || err != nil {
 		rcstore.Load()
 	}
-
 	return rcstore.BigData(id, key)
 }
 
@@ -1117,7 +1164,6 @@ func (s *store) SetContainerBigData(id, key string, data []byte) error {
 	if modified, err := rcstore.Modified(); modified || err != nil {
 		rcstore.Load()
 	}
-
 	return rcstore.SetBigData(id, key, data)
 }