Track the digests of "big data" items
Teach image and container store types to also track the digests of "big data" items that we have them store.

Signed-off-by: Nalin Dahyabhai <nalin@redhat.com>
This commit is contained in:
parent 232429cdea
commit eace836c44
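With this change, callers can ask the store for the digest of a stored item just as they can ask for its size. A minimal sketch of the new entry points, assuming default store options; the image ID and the "manifest" key are placeholders for data set earlier via SetImageBigData:

package main

import (
	"fmt"

	"github.com/containers/storage"
)

func main() {
	// Assumption: default options are sufficient for this environment.
	store, err := storage.GetStore(storage.StoreOptions{})
	if err != nil {
		panic(err)
	}
	// "someimageid" and "manifest" are hypothetical values for illustration.
	d, err := store.ImageBigDataDigest("someimageid", "manifest")
	if err != nil {
		fmt.Println("digest unknown:", err)
		return
	}
	fmt.Println("stored digest:", d) // e.g. "sha256:..."
}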
containers.go

@@ -10,6 +10,7 @@ import (
 	"github.com/containers/storage/pkg/ioutils"
 	"github.com/containers/storage/pkg/stringid"
 	"github.com/containers/storage/pkg/truncindex"
+	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
@@ -46,6 +46,10 @@ type Container struct {
 	// that has been stored, if they're known.
 	BigDataSizes map[string]int64 `json:"big-data-sizes,omitempty"`
 
+	// BigDataDigests maps the names in BigDataNames to the digests of the
+	// data that has been stored, if they're known.
+	BigDataDigests map[string]digest.Digest `json:"big-data-digests,omitempty"`
+
 	// Created is the datestamp for when this container was created. Older
 	// versions of the library did not track this information, so callers
 	// will likely want to use the IsZero() method to verify that a value
@@ -253,15 +257,16 @@ func (r *containerStore) Create(id string, names []string, image, layer, metadat
 	}
 	if err == nil {
 		container = &Container{
 			ID:             id,
 			Names:          names,
 			ImageID:        image,
 			LayerID:        layer,
 			Metadata:       metadata,
 			BigDataNames:   []string{},
 			BigDataSizes:   make(map[string]int64),
-			Created:        time.Now().UTC(),
-			Flags:          make(map[string]interface{}),
+			BigDataDigests: make(map[string]digest.Digest),
+			Created:        time.Now().UTC(),
+			Flags:          make(map[string]interface{}),
 		}
 		r.containers = append(r.containers, container)
 		r.byid[id] = container
@@ -369,7 +374,7 @@ func (r *containerStore) Exists(id string) bool {
 
 func (r *containerStore) BigData(id, key string) ([]byte, error) {
 	if key == "" {
-		return nil, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return nil, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve container big data value for empty name")
 	}
 	c, ok := r.lookup(id)
 	if !ok {
@@ -380,7 +385,7 @@ func (r *containerStore) BigData(id, key string) ([]byte, error) {
 
 func (r *containerStore) BigDataSize(id, key string) (int64, error) {
 	if key == "" {
-		return -1, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return -1, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve size of container big data with empty name")
 	}
 	c, ok := r.lookup(id)
 	if !ok {
@@ -392,9 +397,48 @@ func (r *containerStore) BigDataSize(id, key string) (int64, error) {
 	if size, ok := c.BigDataSizes[key]; ok {
 		return size, nil
 	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			c, ok := r.lookup(id)
+			if !ok {
+				return -1, ErrContainerUnknown
+			}
+			if size, ok := c.BigDataSizes[key]; ok {
+				return size, nil
+			}
+		}
+	}
 	return -1, ErrSizeUnknown
 }
 
+func (r *containerStore) BigDataDigest(id, key string) (digest.Digest, error) {
+	if key == "" {
+		return "", errors.Wrapf(ErrInvalidBigDataName, "can't retrieve digest of container big data value with empty name")
+	}
+	c, ok := r.lookup(id)
+	if !ok {
+		return "", ErrContainerUnknown
+	}
+	if c.BigDataDigests == nil {
+		c.BigDataDigests = make(map[string]digest.Digest)
+	}
+	if d, ok := c.BigDataDigests[key]; ok {
+		return d, nil
+	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			c, ok := r.lookup(id)
+			if !ok {
+				return "", ErrContainerUnknown
+			}
+			if d, ok := c.BigDataDigests[key]; ok {
+				return d, nil
+			}
+		}
+	}
+	return "", ErrDigestUnknown
+}
+
 func (r *containerStore) BigDataNames(id string) ([]string, error) {
 	c, ok := r.lookup(id)
 	if !ok {
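The digest recorded for each item is the canonical one from the opencontainers/go-digest package (SHA-256). A standalone sketch of the compute-and-compare step that the new bookkeeping rests on; verifyBigData is a hypothetical helper, not part of this change:

package main

import (
	"fmt"

	digest "github.com/opencontainers/go-digest"
)

// verifyBigData recomputes the canonical digest of data and compares it
// with the digest the store recorded for the same item.
func verifyBigData(data []byte, recorded digest.Digest) error {
	if err := recorded.Validate(); err != nil {
		return fmt.Errorf("recorded digest is malformed: %v", err)
	}
	if actual := digest.Canonical.FromBytes(data); actual != recorded {
		return fmt.Errorf("digest mismatch: got %s, want %s", actual, recorded)
	}
	return nil
}

func main() {
	data := []byte("example big data item")
	d := digest.Canonical.FromBytes(data) // the same call SetBigData uses
	fmt.Println(verifyBigData(data, d))   // <nil>
}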
@@ -405,7 +449,7 @@ func (r *containerStore) BigDataNames(id string) ([]string, error) {
 
 func (r *containerStore) SetBigData(id, key string, data []byte) error {
 	if key == "" {
-		return errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return errors.Wrapf(ErrInvalidBigDataName, "can't set empty name for container big data item")
 	}
 	c, ok := r.lookup(id)
 	if !ok {
@@ -422,17 +466,23 @@ func (r *containerStore) SetBigData(id, key string, data []byte) error {
 	}
 	oldSize, sizeOk := c.BigDataSizes[key]
 	c.BigDataSizes[key] = int64(len(data))
-	if !sizeOk || oldSize != c.BigDataSizes[key] {
+	if c.BigDataDigests == nil {
+		c.BigDataDigests = make(map[string]digest.Digest)
+	}
+	oldDigest, digestOk := c.BigDataDigests[key]
+	newDigest := digest.Canonical.FromBytes(data)
+	c.BigDataDigests[key] = newDigest
+	if !sizeOk || oldSize != c.BigDataSizes[key] || !digestOk || oldDigest != newDigest {
 		save = true
 	}
-	add := true
+	addName := true
 	for _, name := range c.BigDataNames {
 		if name == key {
-			add = false
+			addName = false
 			break
 		}
 	}
-	if add {
+	if addName {
 		c.BigDataNames = append(c.BigDataNames, key)
 		save = true
 	}
errors.go

@@ -51,4 +51,6 @@ var (
 	ErrDuplicateLayerNames = errors.New("read-only layer store assigns the same name to multiple layers")
 	// ErrInvalidBigDataName indicates that the name for a big data item is not acceptable; it may be empty.
 	ErrInvalidBigDataName = errors.New("not a valid name for a big data item")
+	// ErrDigestUnknown indicates that we were unable to compute the digest of a specified item.
+	ErrDigestUnknown = errors.New("could not compute digest of item")
 )
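ErrDigestUnknown is returned unwrapped by the digest lookups in this change, so callers can compare against it directly. A sketch of how client code might branch on it; reportDigest and its arguments are illustrative only:

package example

import (
	"fmt"

	"github.com/containers/storage"
)

// reportDigest distinguishes "no digest recorded" from real lookup failures.
func reportDigest(store storage.Store, imageID, key string) {
	d, err := store.ImageBigDataDigest(imageID, key)
	switch {
	case err == nil:
		fmt.Println("digest:", d)
	case err == storage.ErrDigestUnknown:
		fmt.Println("no digest recorded for", key)
	default:
		fmt.Println("lookup failed:", err)
	}
}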
82 images.go
@@ -10,6 +10,7 @@ import (
 	"github.com/containers/storage/pkg/ioutils"
 	"github.com/containers/storage/pkg/stringid"
 	"github.com/containers/storage/pkg/truncindex"
+	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
@@ -42,6 +43,10 @@ type Image struct {
 	// that has been stored, if they're known.
 	BigDataSizes map[string]int64 `json:"big-data-sizes,omitempty"`
 
+	// BigDataDigests maps the names in BigDataNames to the digests of the
+	// data that has been stored, if they're known.
+	BigDataDigests map[string]digest.Digest `json:"big-data-digests,omitempty"`
+
 	// Created is the datestamp for when this image was created. Older
 	// versions of the library did not track this information, so callers
 	// will likely want to use the IsZero() method to verify that a value
@@ -286,14 +291,15 @@ func (r *imageStore) Create(id string, names []string, layer, metadata string, c
 	}
 	if err == nil {
 		image = &Image{
 			ID:             id,
 			Names:          names,
 			TopLayer:       layer,
 			Metadata:       metadata,
 			BigDataNames:   []string{},
 			BigDataSizes:   make(map[string]int64),
-			Created:        created,
-			Flags:          make(map[string]interface{}),
+			BigDataDigests: make(map[string]digest.Digest),
+			Created:        created,
+			Flags:          make(map[string]interface{}),
 		}
 		r.images = append(r.images, image)
 		r.idindex.Add(id)
@@ -407,7 +413,7 @@ func (r *imageStore) Exists(id string) bool {
 
 func (r *imageStore) BigData(id, key string) ([]byte, error) {
 	if key == "" {
-		return nil, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return nil, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve image big data value for empty name")
 	}
 	image, ok := r.lookup(id)
 	if !ok {
@@ -418,7 +424,7 @@ func (r *imageStore) BigData(id, key string) ([]byte, error) {
 
 func (r *imageStore) BigDataSize(id, key string) (int64, error) {
 	if key == "" {
-		return -1, errors.Wrapf(ErrInvalidBigDataName, "data name %q can not be used as a filename", key)
+		return -1, errors.Wrapf(ErrInvalidBigDataName, "can't retrieve size of image big data with empty name")
 	}
 	image, ok := r.lookup(id)
 	if !ok {
@@ -430,9 +436,48 @@ func (r *imageStore) BigDataSize(id, key string) (int64, error) {
 	if size, ok := image.BigDataSizes[key]; ok {
 		return size, nil
 	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			image, ok := r.lookup(id)
+			if !ok {
+				return -1, ErrImageUnknown
+			}
+			if size, ok := image.BigDataSizes[key]; ok {
+				return size, nil
+			}
+		}
+	}
 	return -1, ErrSizeUnknown
 }
 
+func (r *imageStore) BigDataDigest(id, key string) (digest.Digest, error) {
+	if key == "" {
+		return "", errors.Wrapf(ErrInvalidBigDataName, "can't retrieve digest of image big data value with empty name")
+	}
+	image, ok := r.lookup(id)
+	if !ok {
+		return "", ErrImageUnknown
+	}
+	if image.BigDataDigests == nil {
+		image.BigDataDigests = make(map[string]digest.Digest)
+	}
+	if d, ok := image.BigDataDigests[key]; ok {
+		return d, nil
+	}
+	if data, err := r.BigData(id, key); err == nil && data != nil {
+		if r.SetBigData(id, key, data) == nil {
+			image, ok := r.lookup(id)
+			if !ok {
+				return "", ErrImageUnknown
+			}
+			if d, ok := image.BigDataDigests[key]; ok {
+				return d, nil
+			}
+		}
+	}
+	return "", ErrDigestUnknown
+}
+
 func (r *imageStore) BigDataNames(id string) ([]string, error) {
 	image, ok := r.lookup(id)
 	if !ok {
@@ -442,6 +487,9 @@ func (r *imageStore) BigDataNames(id string) ([]string, error) {
 	}
 
 func (r *imageStore) SetBigData(id, key string, data []byte) error {
+	if key == "" {
+		return errors.Wrapf(ErrInvalidBigDataName, "can't set empty name for image big data item")
+	}
 	if !r.IsReadWrite() {
 		return errors.Wrapf(ErrStoreIsReadOnly, "not allowed to save data items associated with images at %q", r.imagespath())
 	}
|
||||||
}
|
}
|
||||||
err := ioutils.AtomicWriteFile(r.datapath(image.ID, key), data, 0600)
|
err := ioutils.AtomicWriteFile(r.datapath(image.ID, key), data, 0600)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
add := true
|
|
||||||
save := false
|
save := false
|
||||||
if image.BigDataSizes == nil {
|
if image.BigDataSizes == nil {
|
||||||
image.BigDataSizes = make(map[string]int64)
|
image.BigDataSizes = make(map[string]int64)
|
||||||
}
|
}
|
||||||
oldSize, sizeOk := image.BigDataSizes[key]
|
oldSize, sizeOk := image.BigDataSizes[key]
|
||||||
image.BigDataSizes[key] = int64(len(data))
|
image.BigDataSizes[key] = int64(len(data))
|
||||||
if !sizeOk || oldSize != image.BigDataSizes[key] {
|
if image.BigDataDigests == nil {
|
||||||
|
image.BigDataDigests = make(map[string]digest.Digest)
|
||||||
|
}
|
||||||
|
oldDigest, digestOk := image.BigDataDigests[key]
|
||||||
|
newDigest := digest.Canonical.FromBytes(data)
|
||||||
|
image.BigDataDigests[key] = newDigest
|
||||||
|
if !sizeOk || oldSize != image.BigDataSizes[key] || !digestOk || oldDigest != newDigest {
|
||||||
save = true
|
save = true
|
||||||
}
|
}
|
||||||
|
addName := true
|
||||||
for _, name := range image.BigDataNames {
|
for _, name := range image.BigDataNames {
|
||||||
if name == key {
|
if name == key {
|
||||||
add = false
|
addName = false
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if add {
|
if addName {
|
||||||
image.BigDataNames = append(image.BigDataNames, key)
|
image.BigDataNames = append(image.BigDataNames, key)
|
||||||
save = true
|
save = true
|
||||||
}
|
}
|
||||||
|
|
|
||||||
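Note the shared shape of BigDataSize and BigDataDigest in both stores: on a cache miss (records written before these fields existed carry no sizes or digests), they re-read the item and push it back through SetBigData, which repopulates both maps, then retry the lookup once. A toy sketch of that backfill pattern, with all names invented for illustration:

package main

import (
	"errors"
	"fmt"
)

var errUnknown = errors.New("value unknown")

// cache is a stand-in for the store's BigDataSizes/BigDataDigests maps.
type cache struct {
	derived map[string]int64  // e.g. sizes or digests
	blobs   map[string][]byte // the big data items themselves
}

// set mimics SetBigData: writing the blob refreshes the derived value too.
func (c *cache) set(key string, data []byte) {
	c.blobs[key] = data
	c.derived[key] = int64(len(data))
}

// lookup mimics the backfill: on a miss, re-read the raw data, re-set it,
// and retry the derived map exactly once before giving up.
func (c *cache) lookup(key string) (int64, error) {
	if v, ok := c.derived[key]; ok {
		return v, nil
	}
	if data, ok := c.blobs[key]; ok {
		c.set(key, data)
		if v, ok := c.derived[key]; ok {
			return v, nil
		}
	}
	return -1, errUnknown
}

func main() {
	c := &cache{derived: map[string]int64{}, blobs: map[string][]byte{"k": []byte("abc")}}
	fmt.Println(c.lookup("k")) // 3 <nil>: backfilled from the stored blob
}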
54 store.go
@@ -20,7 +20,7 @@ import (
 	"github.com/containers/storage/pkg/idtools"
 	"github.com/containers/storage/pkg/ioutils"
 	"github.com/containers/storage/pkg/stringid"
-	"github.com/opencontainers/go-digest"
+	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
@@ -87,6 +87,10 @@ type ROBigDataStore interface {
 	// data associated with this ID, if it has previously been set.
 	BigDataSize(id, key string) (int64, error)
 
+	// BigDataDigest retrieves the digest of a (potentially large) piece of
+	// data associated with this ID, if it has previously been set.
+	BigDataDigest(id, key string) (digest.Digest, error)
+
 	// BigDataNames() returns a list of the names of previously-stored pieces of
 	// data.
 	BigDataNames(id string) ([]string, error)
@@ -327,6 +331,10 @@ type Store interface {
 	// of named data associated with an image.
 	ImageBigDataSize(id, key string) (int64, error)
 
+	// ImageBigDataDigest retrieves the digest of a (possibly large) chunk
+	// of named data associated with an image.
+	ImageBigDataDigest(id, key string) (digest.Digest, error)
+
 	// SetImageBigData stores a (possibly large) chunk of named data associated
 	// with an image.
 	SetImageBigData(id, key string, data []byte) error
@@ -343,6 +351,10 @@ type Store interface {
 	// chunk of named data associated with a container.
 	ContainerBigDataSize(id, key string) (int64, error)
 
+	// ContainerBigDataDigest retrieves the digest of a (possibly large)
+	// chunk of named data associated with a container.
+	ContainerBigDataDigest(id, key string) (digest.Digest, error)
+
 	// SetContainerBigData stores a (possibly large) chunk of named data
 	// associated with a container.
 	SetContainerBigData(id, key string, data []byte) error
@@ -1026,6 +1038,30 @@ func (s *store) ImageBigDataSize(id, key string) (int64, error) {
 	return -1, ErrSizeUnknown
 }
 
+func (s *store) ImageBigDataDigest(id, key string) (digest.Digest, error) {
+	ristore, err := s.ImageStore()
+	if err != nil {
+		return "", err
+	}
+	stores, err := s.ROImageStores()
+	if err != nil {
+		return "", err
+	}
+	stores = append([]ROImageStore{ristore}, stores...)
+	for _, ristore := range stores {
+		ristore.Lock()
+		defer ristore.Unlock()
+		if modified, err := ristore.Modified(); modified || err != nil {
+			ristore.Load()
+		}
+		d, err := ristore.BigDataDigest(id, key)
+		if err == nil && d.Validate() == nil {
+			return d, nil
+		}
+	}
+	return "", ErrDigestUnknown
+}
+
 func (s *store) ImageBigData(id, key string) ([]byte, error) {
 	istore, err := s.ImageStore()
 	if err != nil {
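ImageBigDataDigest consults the read-write image store first and then any read-only stores, settling for the first digest that parses; the d.Validate() check filters out empty or malformed entries. That filter in miniature; firstValidDigest is an invented helper, not store API:

package main

import (
	"fmt"

	digest "github.com/opencontainers/go-digest"
)

// firstValidDigest scans candidates in priority order and returns the
// first syntactically valid digest, mirroring the loop above.
func firstValidDigest(candidates []digest.Digest) (digest.Digest, bool) {
	for _, d := range candidates {
		if d.Validate() == nil {
			return d, true
		}
	}
	return "", false
}

func main() {
	good := digest.Canonical.FromBytes([]byte("data"))
	d, ok := firstValidDigest([]digest.Digest{"", "not-a-digest", good})
	fmt.Println(d, ok) // sha256:... true
}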
@@ -1089,10 +1125,22 @@ func (s *store) ContainerBigDataSize(id, key string) (int64, error) {
 	if modified, err := rcstore.Modified(); modified || err != nil {
 		rcstore.Load()
 	}
-
 	return rcstore.BigDataSize(id, key)
 }
 
+func (s *store) ContainerBigDataDigest(id, key string) (digest.Digest, error) {
+	rcstore, err := s.ContainerStore()
+	if err != nil {
+		return "", err
+	}
+	rcstore.Lock()
+	defer rcstore.Unlock()
+	if modified, err := rcstore.Modified(); modified || err != nil {
+		rcstore.Load()
+	}
+	return rcstore.BigDataDigest(id, key)
+}
+
 func (s *store) ContainerBigData(id, key string) ([]byte, error) {
 	rcstore, err := s.ContainerStore()
 	if err != nil {
@@ -1103,7 +1151,6 @@ func (s *store) ContainerBigData(id, key string) ([]byte, error) {
 	if modified, err := rcstore.Modified(); modified || err != nil {
 		rcstore.Load()
 	}
-
 	return rcstore.BigData(id, key)
 }
@@ -1117,7 +1164,6 @@ func (s *store) SetContainerBigData(id, key string, data []byte) error {
 	if modified, err := rcstore.Modified(); modified || err != nil {
 		rcstore.Load()
 	}
-
 	return rcstore.SetBigData(id, key, data)
 }