chunked: store UncompressedDigest if validated
Store the UncompressedDigest when the original tarball was converted to zstd:chunked, since its diffID was computed and validated during the conversion. This way the layer can be reused like any other layer that was fully retrieved and validated. Before this change, a layer that was converted to zstd:chunked was always retrieved again, since it did not have a TOC digest.

Signed-off-by: Giuseppe Scrivano <gscrivan@redhat.com>
This commit is contained in:
parent 4eea64f958
commit dc3f818a84
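For context, a minimal sketch of the reuse rule this commit enables; layerRecord and canReuseFor below are hypothetical illustrations, not the containers/storage API. A converted layer now records the same identity (its validated diffID) that a fully pulled layer does, so one lookup can match both.

package main

import "fmt"

// layerRecord is a hypothetical stand-in for a stored layer's identity fields.
type layerRecord struct {
	UncompressedDigest string // diffID; set once the full content has been validated
	TOCDigest          string // set only when the layer was partially pulled
}

// canReuseFor reports whether an existing layer satisfies a request for a
// blob whose expected diffID is known.
func canReuseFor(l layerRecord, diffID string) bool {
	return l.UncompressedDigest != "" && l.UncompressedDigest == diffID
}

func main() {
	// After this commit, a layer converted to zstd:chunked carries its
	// validated diffID and is therefore reusable like any other layer.
	converted := layerRecord{UncompressedDigest: "sha256:1111deadbeef"}
	fmt.Println(canReuseFor(converted, "sha256:1111deadbeef")) // true
}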
@@ -73,11 +73,9 @@ type chunkedDiffer struct {
 	zstdReader *zstd.Decoder
 	rawReader  io.Reader
 
-	// contentDigest is the digest of the uncompressed content
-	// (diffID) when the layer is fully retrieved. If the layer
-	// is not fully retrieved, instead of using the digest of the
-	// uncompressed content, it refers to the digest of the TOC.
-	contentDigest digest.Digest
+	// tocDigest is the digest of the TOC document when the layer
+	// is partially pulled.
+	tocDigest digest.Digest
 
 	// convertedToZstdChunked is set to true if the layer needs to
 	// be converted to the zstd:chunked format before it can be
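After the rename, the field has a single meaning: it is only ever the digest of the TOC document. As a hedged illustration of how such a digest is checked against raw TOC bytes (using github.com/opencontainers/go-digest, which this file already imports as digest; validateTOC is an illustrative helper, not a function from this package):

package main

import (
	"fmt"

	"github.com/opencontainers/go-digest"
)

// validateTOC verifies raw TOC bytes against an expected TOC digest.
func validateTOC(rawTOC []byte, tocDigest digest.Digest) error {
	verifier := tocDigest.Verifier()
	if _, err := verifier.Write(rawTOC); err != nil {
		return err
	}
	if !verifier.Verified() {
		return fmt.Errorf("TOC digest mismatch: content does not match %s", tocDigest)
	}
	return nil
}

func main() {
	raw := []byte(`{"version":1}`) // stand-in TOC document
	fmt.Println(validateTOC(raw, digest.FromBytes(raw))) // <nil>
}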
@@ -292,7 +290,7 @@ func makeZstdChunkedDiffer(ctx context.Context, store storage.Store, blobSize in
 		return nil, err
 	}
 
-	contentDigest, err := digest.Parse(annotations[internal.ManifestChecksumKey])
+	tocDigest, err := digest.Parse(annotations[internal.ManifestChecksumKey])
 	if err != nil {
 		return nil, fmt.Errorf("parse TOC digest %q: %w", annotations[internal.ManifestChecksumKey], err)
 	}
@@ -300,7 +298,7 @@ func makeZstdChunkedDiffer(ctx context.Context, store storage.Store, blobSize in
 	return &chunkedDiffer{
 		fsVerityDigests: make(map[string]string),
 		blobSize:        blobSize,
-		contentDigest:   contentDigest,
+		tocDigest:       tocDigest,
 		copyBuffer:      makeCopyBuffer(),
 		fileType:        fileTypeZstdChunked,
 		layersCache:     layersCache,
@@ -322,7 +320,7 @@ func makeEstargzChunkedDiffer(ctx context.Context, store storage.Store, blobSize
 		return nil, err
 	}
 
-	contentDigest, err := digest.Parse(annotations[estargz.TOCJSONDigestAnnotation])
+	tocDigest, err := digest.Parse(annotations[estargz.TOCJSONDigestAnnotation])
 	if err != nil {
 		return nil, fmt.Errorf("parse TOC digest %q: %w", annotations[estargz.TOCJSONDigestAnnotation], err)
 	}
@@ -330,7 +328,7 @@ func makeEstargzChunkedDiffer(ctx context.Context, store storage.Store, blobSize
 	return &chunkedDiffer{
 		fsVerityDigests: make(map[string]string),
 		blobSize:        blobSize,
-		contentDigest:   contentDigest,
+		tocDigest:       tocDigest,
 		copyBuffer:      makeCopyBuffer(),
 		fileType:        fileTypeEstargz,
 		layersCache:     layersCache,
@@ -1613,6 +1611,9 @@ func (c *chunkedDiffer) ApplyDiff(dest string, options *archive.TarOptions, diff
 	// stream to use for reading the zstd:chunked or Estargz file.
 	stream := c.stream
 
+	var uncompressedDigest digest.Digest
+	tocDigest := c.tocDigest
+
 	if c.convertToZstdChunked {
 		fd, err := unix.Open(dest, unix.O_TMPFILE|unix.O_RDWR|unix.O_CLOEXEC, 0o600)
 		if err != nil {
@@ -1663,13 +1664,13 @@ func (c *chunkedDiffer) ApplyDiff(dest string, options *archive.TarOptions, diff
 		c.fileType = fileTypeZstdChunked
 		c.manifest = manifest
 		c.tarSplit = tarSplit
-
-		// since we retrieved the whole file and it was validated, use the diffID instead of the TOC digest.
-		c.contentDigest = diffID
 		c.tocOffset = tocOffset
 
 		// the file was generated by us and the digest for each file was already computed, no need to validate it again.
 		c.skipValidation = true
+		// since we retrieved the whole file and it was validated, do not use the TOC digest, but set the uncompressed digest.
+		tocDigest = ""
+		uncompressedDigest = diffID
 	}
 
 	lcd := chunkedLayerData{
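Why the diffID is trustworthy here: conversion decompresses and rewrites the entire blob, so the uncompressed tar stream can be digested as a side effect. A hedged sketch of that pattern follows; computeDiffIDWhileConverting is illustrative, and io.Copy stands in for the real tar to zstd:chunked rewrite.

package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/opencontainers/go-digest"
)

// computeDiffIDWhileConverting copies the uncompressed tar stream into the
// converted output while digesting every byte, so the diffID is known and
// validated by the time the conversion finishes.
func computeDiffIDWhileConverting(uncompressedTar io.Reader, converted io.Writer) (digest.Digest, error) {
	digester := digest.Canonical.Digester()
	// Every byte read from the tar also flows into the digester.
	tee := io.TeeReader(uncompressedTar, digester.Hash())
	if _, err := io.Copy(converted, tee); err != nil { // stand-in for the real conversion
		return "", err
	}
	return digester.Digest(), nil
}

func main() {
	diffID, err := computeDiffIDWhileConverting(strings.NewReader("tar bytes"), io.Discard)
	fmt.Println(diffID, err)
}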
@@ -1698,7 +1699,8 @@ func (c *chunkedDiffer) ApplyDiff(dest string, options *archive.TarOptions, diff
 		Artifacts: map[string]interface{}{
 			tocKey: toc,
 		},
-		TOCDigest: c.contentDigest,
+		TOCDigest:          tocDigest,
+		UncompressedDigest: uncompressedDigest,
 	}
 
 	if !parseBooleanPullOption(c.storeOpts, "enable_partial_images", false) {
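The differ output now encodes an either/or identity: exactly one of the two digests describes the layer, depending on how it was obtained. A minimal sketch of that invariant and of how a consumer might pick the digest for reuse lookups; differOutput and layerIdentity below are illustrative abbreviations, not the actual driver output type.

package main

import (
	"fmt"

	"github.com/opencontainers/go-digest"
)

// differOutput abbreviates the identity-related fields of the differ's result.
type differOutput struct {
	TOCDigest          digest.Digest // set only for partially pulled layers
	UncompressedDigest digest.Digest // set once the full content was validated
}

// layerIdentity prefers the uncompressed digest, which matches ordinary,
// fully pulled layers, and falls back to the TOC digest otherwise.
func layerIdentity(out differOutput) digest.Digest {
	if out.UncompressedDigest != "" {
		return out.UncompressedDigest
	}
	return out.TOCDigest
}

func main() {
	converted := differOutput{UncompressedDigest: "sha256:2222cafef00d"}
	fmt.Println(layerIdentity(converted)) // the diffID, reusable as-is
}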