mirror of https://github.com/docker/docs.git
Merge pull request #7078 from unclejack/fix_archive_tar_pax_headers
archive/tar: update to fix writing of PAX headers
commit 1473374a2a
@@ -55,7 +55,7 @@ clone hg code.google.com/p/gosqlite 74691fb6f837
 # get Go tip's archive/tar, for xattr support and improved performance
 # TODO after Go 1.4 drops, bump our minimum supported version and drop this vendored dep
-clone hg code.google.com/p/go 17404efd6b02
+clone hg code.google.com/p/go 1b17b3426e3c
 mv src/code.google.com/p/go/src/pkg/archive/tar tmp-tar
 rm -rf src/code.google.com/p/go
 mkdir -p src/code.google.com/p/go/src/pkg/archive
@@ -39,7 +39,8 @@ type Writer struct {
 	closed     bool
 	usedBinary bool // whether the binary numeric field extension was used
 	preferPax  bool // use pax header instead of binary numeric header
-	hdrBuff    [blockSize]byte // buffer to use in writeHeader
+	hdrBuff    [blockSize]byte // buffer to use in writeHeader when writing a regular header
+	paxHdrBuff [blockSize]byte // buffer to use in writeHeader when writing a pax header
 }
 
 // NewWriter creates a new Writer writing to w.
@@ -161,7 +162,17 @@ func (tw *Writer) writeHeader(hdr *Header, allowPax bool) error {
 	// subsecond time resolution, but for now let's just capture
 	// too long fields or non ascii characters
 
-	header := tw.hdrBuff[:]
+	var header []byte
+
+	// We need to select which scratch buffer to use carefully,
+	// since this method is called recursively to write PAX headers.
+	// If allowPax is true, this is the non-recursive call, and we will use hdrBuff.
+	// If allowPax is false, we are being called by writePAXHeader, and hdrBuff is
+	// already being used by the non-recursive call, so we must use paxHdrBuff.
+	header = tw.hdrBuff[:]
+	if !allowPax {
+		header = tw.paxHdrBuff[:]
+	}
 	copy(header, zeroBlock)
 	s := slicer(header)
 
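
The comments added in this hunk carry the heart of the fix: writeHeader is called recursively (via writePAXHeader) whenever a PAX extended header has to be emitted, so the recursive call must not reuse the scratch buffer the outer call is still building its header in. Below is a minimal, self-contained sketch of that buffer-selection pattern; the fakeWriter type and its string "serialization" are hypothetical stand-ins, not the real archive/tar code, and exist only to make the hazard concrete.

package main

import (
	"fmt"
	"strings"
)

// blockSize mirrors the 512-byte tar block that the real scratch buffers hold.
const blockSize = 512

// fakeWriter is a hypothetical stand-in for archive/tar's Writer, reduced to
// the two scratch buffers so the recursion hazard is easy to see.
type fakeWriter struct {
	hdrBuff    [blockSize]byte // scratch for the outer (allowPax == true) call
	paxHdrBuff [blockSize]byte // scratch for the recursive PAX (allowPax == false) call
}

// writeHeader mimics the patched control flow: pick a buffer based on
// allowPax, fill it, possibly recurse to emit a PAX record, then flush.
func (w *fakeWriter) writeHeader(name string, allowPax bool) {
	header := w.hdrBuff[:]
	if !allowPax {
		header = w.paxHdrBuff[:]
	}
	copy(header, name) // stand-in for serializing the real header fields

	if allowPax && len(name) > 100 {
		// Name is too long for the USTAR field, so a PAX extended header
		// record is written first via a recursive call. Before the fix this
		// call reused hdrBuff and clobbered the outer header built above.
		w.writeHeader("PaxHeaders/"+name, false)
	}

	fmt.Printf("allowPax=%v flushes block starting %q\n", allowPax, header[:12])
}

func main() {
	var w fakeWriter
	w.writeHeader(strings.Repeat("ab", 100), true)
}

If both calls shared hdrBuff, the outer call would flush a block that the recursive call had already overwritten with the PAX record's contents, which is the corruption the new test below guards against.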
@@ -454,3 +454,38 @@ func TestUSTARLongName(t *testing.T) {
 		t.Fatal("Couldn't recover long name")
 	}
 }
+
+func TestValidTypeflagWithPAXHeader(t *testing.T) {
+	var buffer bytes.Buffer
+	tw := NewWriter(&buffer)
+
+	fileName := strings.Repeat("ab", 100)
+
+	hdr := &Header{
+		Name:     fileName,
+		Size:     4,
+		Typeflag: 0,
+	}
+	if err := tw.WriteHeader(hdr); err != nil {
+		t.Fatalf("Failed to write header: %s", err)
+	}
+	if _, err := tw.Write([]byte("fooo")); err != nil {
+		t.Fatalf("Failed to write the file's data: %s", err)
+	}
+	tw.Close()
+
+	tr := NewReader(&buffer)
+
+	for {
+		header, err := tr.Next()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			t.Fatalf("Failed to read header: %s", err)
+		}
+		if header.Typeflag != 0 {
+			t.Fatalf("Typeflag should've been 0, found %d", header.Typeflag)
+		}
+	}
+}
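
For end-to-end context on what the test covers, here is a small sketch that is not part of the commit: it drives the same PAX code path through the public API of the current standard-library archive/tar. The typeflag offset 156 and the 'x' value follow from the tar header layout and the PAX extended-header convention; the assumption that a 200-byte name with no '/' makes the writer pick the PAX format reflects current Go behaviour rather than anything guaranteed by this vendored snapshot.

package main

import (
	"archive/tar"
	"bytes"
	"fmt"
	"log"
	"strings"
)

func main() {
	var buf bytes.Buffer
	tw := tar.NewWriter(&buf)

	// A name longer than the 100-byte USTAR name field forces the writer
	// to emit a PAX extended header record ahead of the real file header.
	hdr := &tar.Header{
		Name:     strings.Repeat("ab", 100), // 200 bytes, no '/' to split on
		Mode:     0644,
		Size:     4,
		Typeflag: tar.TypeReg,
	}
	if err := tw.WriteHeader(hdr); err != nil {
		log.Fatal(err)
	}
	if _, err := tw.Write([]byte("fooo")); err != nil {
		log.Fatal(err)
	}
	if err := tw.Close(); err != nil {
		log.Fatal(err)
	}

	// Raw stream: the first 512-byte block is the PAX extended header;
	// its typeflag lives at byte offset 156 and should be 'x' (tar.TypeXHeader).
	raw := buf.Bytes()
	fmt.Printf("first block typeflag: %c\n", raw[156])

	// The reader hides the PAX record and hands back one logical entry
	// with the full name and the original typeflag.
	tr := tar.NewReader(&buf)
	got, err := tr.Next()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("name length: %d, typeflag: %c\n", len(got.Name), got.Typeflag)
}

Running it should print an 'x' for the first raw block and then a 200-byte name with typeflag '0' for the logical entry, which is the same round-trip property TestValidTypeflagWithPAXHeader asserts.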