mirror of https://github.com/docker/docs.git
commit ca90842799
@@ -140,7 +140,7 @@ func NewTarget(targetName string, targetPath string) (*Target, error) {
 		return nil, err
 	}
 
-	meta, err := data.NewFileMeta(bytes.NewBuffer(b))
+	meta, err := data.NewFileMeta(bytes.NewBuffer(b), data.NotaryDefaultHashes...)
 	if err != nil {
 		return nil, err
 	}

@@ -2,7 +2,6 @@ package main
 
 import (
 	"bufio"
-	"crypto/sha256"
 	"fmt"
 	"io/ioutil"
 	"net"
@@ -12,8 +11,6 @@ import (
 	"strings"
 	"time"
 
-	"crypto/subtle"
-
 	"github.com/Sirupsen/logrus"
 	"github.com/docker/distribution/registry/client/auth"
 	"github.com/docker/distribution/registry/client/transport"
@@ -385,13 +382,10 @@ func (t *tufCommander) tufVerify(cmd *cobra.Command, args []string) error {
 		return fmt.Errorf("error retrieving target by name:%s, error:%v", targetName, err)
 	}
 
-	// Create hasher and hash data
-	stdinHash := sha256.Sum256(payload)
-	serverHash := target.Hashes["sha256"]
-
-	if subtle.ConstantTimeCompare(stdinHash[:], serverHash) == 0 {
-		return fmt.Errorf("notary: data not present in the trusted collection")
+	if err := data.CheckHashes(payload, target.Hashes); err != nil {
+		return fmt.Errorf("data not present in the trusted collection, %v", err)
 	}
 
 	_, _ = os.Stdout.Write(payload)
 	return nil
 }

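The tufVerify hunk above replaces a hand-rolled sha256 comparison with the data.CheckHashes helper added later in this diff. A rough, standalone sketch of that check; the payload and trusted hashes are made up here, whereas in the real command the payload comes from stdin and the hashes from the target looked up in the trusted collection:

package main

import (
	"crypto/sha256"
	"fmt"
	"os"

	"github.com/docker/notary/tuf/data"
)

func main() {
	payload := []byte("hello world")

	// Illustrative stand-in for target.Hashes: compute the trusted sha256 locally.
	sum := sha256.Sum256(payload)
	trusted := data.Hashes{"sha256": sum[:]}

	// Verify the payload against every supported checksum it carries.
	if err := data.CheckHashes(payload, trusted); err != nil {
		fmt.Fprintf(os.Stderr, "data not present in the trusted collection, %v\n", err)
		os.Exit(1)
	}
	_, _ = os.Stdout.Write(payload)
}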
const.go
@@ -20,6 +20,10 @@ const (
 	PubCertPerms = 0755
 	// Sha256HexSize is how big a Sha256 hex is in number of characters
 	Sha256HexSize = 64
+	// SHA256 is the name of SHA256 hash algorithm
+	SHA256 = "sha256"
+	// SHA512 is the name of SHA512 hash algorithm
+	SHA512 = "sha512"
 	// TrustedCertsDir is the directory, under the notary repo base directory, where trusted certs are stored
 	TrustedCertsDir = "trusted_certificates"
 	// PrivDir is the directory, under the notary repo base directory, where private keys are stored

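The new SHA256 and SHA512 constants give the rest of the code named algorithm keys instead of scattered "sha256"/"sha512" literals. A small, hypothetical sketch of using them together with Sha256HexSize to sanity-check a hex digest before decoding it; the import path github.com/docker/notary for the package root is assumed from the tuf/data imports further down:

package main

import (
	"encoding/hex"
	"fmt"

	"github.com/docker/notary"
)

func main() {
	// A sha256 digest taken from the test data later in this commit.
	digest := "d13e2b60d74c2e6f4f449b5e536814edf9a4827f5a9f4f957fc92e77609b9c92"

	// A sha256 hex digest is always Sha256HexSize (64) characters long.
	if len(digest) != notary.Sha256HexSize {
		fmt.Println("unexpected length for a", notary.SHA256, "hex digest")
		return
	}
	raw, err := hex.DecodeString(digest)
	if err != nil {
		fmt.Println("not valid hex:", err)
		return
	}
	fmt.Printf("%s digest decodes to %d bytes\n", notary.SHA256, len(raw))
}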
@@ -46,7 +46,6 @@ func GetOrCreateSnapshotKey(gun string, store storage.KeyStore, crypto signed.Cr
 // whatever the most recent snapshot is to create the next one, only updating
 // the expiry time and version.
 func GetOrCreateSnapshot(gun string, store storage.MetaStore, cryptoService signed.CryptoService) ([]byte, error) {
-
 	d, err := store.GetCurrent(gun, "snapshot")
 	if err != nil {
 		return nil, err

@@ -1,8 +1,6 @@
 package timestamp
 
 import (
-	"bytes"
-
 	"github.com/docker/go/canonical/json"
 	"github.com/docker/notary/tuf/data"
 	"github.com/docker/notary/tuf/signed"
@@ -95,16 +93,11 @@ func timestampExpired(ts *data.SignedTimestamp) bool {
 	return signed.IsExpired(ts.Signed.Expires)
 }
 
+// snapshotExpired verifies the checksum(s) for the given snapshot using metadata from the timestamp
 func snapshotExpired(ts *data.SignedTimestamp, snapshot []byte) bool {
-	meta, err := data.NewFileMeta(bytes.NewReader(snapshot), "sha256")
-	if err != nil {
-		// if we can't generate FileMeta from the current snapshot, we should
-		// continue to serve the old timestamp if it isn't time expired
-		// because we won't be able to generate a new one.
-		return false
-	}
-	hash := meta.Hashes["sha256"]
-	return !bytes.Equal(hash, ts.Signed.Meta["snapshot"].Hashes["sha256"])
+	// If this check failed, it means the current snapshot was not exactly what we expect
+	// via the timestamp. So we can consider it to be "expired."
+	return data.CheckHashes(snapshot, ts.Signed.Meta[data.CanonicalSnapshotRole].Hashes) != nil
 }
 
 // CreateTimestamp creates a new timestamp. If a prev timestamp is provided, it

@@ -1,8 +1,6 @@
 package client
 
 import (
-	"bytes"
-	"crypto/sha256"
 	"encoding/json"
 	"fmt"
 	"path"
@@ -92,16 +90,16 @@ func (c *Client) update() error {
 func (c Client) checkRoot() error {
 	role := data.CanonicalRootRole
 	size := c.local.Snapshot.Signed.Meta[role].Length
-	hashSha256 := c.local.Snapshot.Signed.Meta[role].Hashes["sha256"]
+
+	expectedHashes := c.local.Snapshot.Signed.Meta[role].Hashes
 
 	raw, err := c.cache.GetMeta("root", size)
 	if err != nil {
 		return err
 	}
 
-	hash := sha256.Sum256(raw)
-	if !bytes.Equal(hash[:], hashSha256) {
-		return fmt.Errorf("Cached root sha256 did not match snapshot root sha256")
+	if err := data.CheckHashes(raw, expectedHashes); err != nil {
+		return fmt.Errorf("Cached root hashes did not match snapshot root hashes")
 	}
 
 	if int64(len(raw)) != size {
@@ -127,11 +125,19 @@ func (c *Client) downloadRoot() error {
 	// We can't read an exact size for the root metadata without risking getting stuck in the TUF update cycle
 	// since it's possible that downloading timestamp/snapshot metadata may fail due to a signature mismatch
 	var size int64 = -1
-	var expectedSha256 []byte
+
+	// We could not expect what the "snapshot" meta has specified.
+	//
+	// In some old clients, there is only the "sha256",
+	// but both "sha256" and "sha512" in the newer ones.
+	//
+	// And possibly more in the future.
+	var expectedHashes data.Hashes
+
 	if c.local.Snapshot != nil {
 		if prevRootMeta, ok := c.local.Snapshot.Signed.Meta[role]; ok {
 			size = prevRootMeta.Length
-			expectedSha256 = prevRootMeta.Hashes["sha256"]
+			expectedHashes = prevRootMeta.Hashes
 		}
 	}
 
@@ -144,8 +150,9 @@ func (c *Client) downloadRoot() error {
 	old := &data.Signed{}
 	version := 0
 
-	if expectedSha256 != nil {
-		// can only trust cache if we have an expected sha256 to trust
+	// Due to the same reason, we don't really know how many hashes are there.
+	if len(expectedHashes) != 0 {
+		// can only trust cache if we have an expected sha256(for example) to trust
 		cachedRoot, err = c.cache.GetMeta(role, size)
 	}
 
@@ -153,11 +160,11 @@ func (c *Client) downloadRoot() error {
 		logrus.Debug("didn't find a cached root, must download")
 		download = true
 	} else {
-		hash := sha256.Sum256(cachedRoot)
-		if !bytes.Equal(hash[:], expectedSha256) {
+		if err := data.CheckHashes(cachedRoot, expectedHashes); err != nil {
 			logrus.Debug("cached root's hash didn't match expected, must download")
 			download = true
 		}
 
 		err := json.Unmarshal(cachedRoot, old)
 		if err == nil {
 			root, err := data.RootFromSigned(old)
@@ -176,7 +183,7 @@ func (c *Client) downloadRoot() error {
 	var raw []byte
 	if download {
 		// use consistent download if we have the checksum.
-		raw, s, err = c.downloadSigned(role, size, expectedSha256)
+		raw, s, err = c.downloadSigned(role, size, expectedHashes)
 		if err != nil {
 			return err
 		}
@@ -322,8 +329,8 @@ func (c *Client) downloadSnapshot() error {
 		return tuf.ErrNotLoaded{Role: data.CanonicalTimestampRole}
 	}
 	size := c.local.Timestamp.Signed.Meta[role].Length
-	expectedSha256, ok := c.local.Timestamp.Signed.Meta[role].Hashes["sha256"]
-	if !ok {
+	expectedHashes := c.local.Timestamp.Signed.Meta[role].Hashes
+	if len(expectedHashes) == 0 {
 		return data.ErrMissingMeta{Role: "snapshot"}
 	}
 
@@ -336,11 +343,11 @@ func (c *Client) downloadSnapshot() error {
 		download = true
 	} else {
 		// file may have been tampered with on disk. Always check the hash!
-		genHash := sha256.Sum256(raw)
-		if !bytes.Equal(genHash[:], expectedSha256) {
+		if err := data.CheckHashes(raw, expectedHashes); err != nil {
 			logrus.Debug("hash of snapshot in cache did not match expected hash, must download")
 			download = true
 		}
 
 		err := json.Unmarshal(raw, old)
 		if err == nil {
 			snap, err := data.SnapshotFromSigned(old)
@@ -357,7 +364,7 @@ func (c *Client) downloadSnapshot() error {
 	}
 	var s *data.Signed
 	if download {
-		raw, s, err = c.downloadSigned(role, size, expectedSha256)
+		raw, s, err = c.downloadSigned(role, size, expectedHashes)
 		if err != nil {
 			return err
 		}
@@ -439,18 +446,19 @@ func (c *Client) downloadTargets(role string) error {
 	return nil
 }
 
-func (c *Client) downloadSigned(role string, size int64, expectedSha256 []byte) ([]byte, *data.Signed, error) {
-	rolePath := utils.ConsistentName(role, expectedSha256)
+func (c *Client) downloadSigned(role string, size int64, expectedHashes data.Hashes) ([]byte, *data.Signed, error) {
+	rolePath := utils.ConsistentName(role, expectedHashes["sha256"])
 	raw, err := c.remote.GetMeta(rolePath, size)
 	if err != nil {
 		return nil, nil, err
 	}
-	if expectedSha256 != nil {
-		genHash := sha256.Sum256(raw)
-		if !bytes.Equal(genHash[:], expectedSha256) {
+
+	if expectedHashes != nil {
+		if err := data.CheckHashes(raw, expectedHashes); err != nil {
 			return nil, nil, ErrChecksumMismatch{role: role}
 		}
 	}
+
 	s := &data.Signed{}
 	err = json.Unmarshal(raw, s)
 	if err != nil {
@@ -465,8 +473,8 @@ func (c Client) getTargetsFile(role string, snapshotMeta data.Files, consistent
 	if !ok {
 		return nil, data.ErrMissingMeta{Role: role}
 	}
-	expectedSha256, ok := snapshotMeta[role].Hashes["sha256"]
-	if !ok {
+	expectedHashes := snapshotMeta[role].Hashes
+	if len(expectedHashes) == 0 {
 		return nil, data.ErrMissingMeta{Role: role}
 	}
 
@@ -480,10 +488,10 @@ func (c Client) getTargetsFile(role string, snapshotMeta data.Files, consistent
 		download = true
 	} else {
 		// file may have been tampered with on disk. Always check the hash!
-		genHash := sha256.Sum256(raw)
-		if !bytes.Equal(genHash[:], expectedSha256) {
+		if err := data.CheckHashes(raw, expectedHashes); err != nil {
 			download = true
 		}
 
 		err := json.Unmarshal(raw, old)
 		if err == nil {
 			targ, err := data.TargetsFromSigned(old, role)
@@ -500,7 +508,7 @@ func (c Client) getTargetsFile(role string, snapshotMeta data.Files, consistent
 	size := snapshotMeta[role].Length
 	var s *data.Signed
 	if download {
-		raw, s, err = c.downloadSigned(role, size, expectedSha256)
+		raw, s, err = c.downloadSigned(role, size, expectedHashes)
 		if err != nil {
 			return nil, err
 		}

@@ -1,6 +1,7 @@
 package client
 
 import (
+	"bytes"
 	"crypto/sha256"
 	"encoding/json"
 	"fmt"
@@ -250,12 +251,14 @@ func TestChecksumMismatch(t *testing.T) {
 
 	sampleTargets := data.NewTargets()
 	orig, err := json.Marshal(sampleTargets)
-	origSha256 := sha256.Sum256(orig)
 	assert.NoError(t, err)
 
+	origHashes, err := GetSupportedHashes(orig)
+	assert.NoError(t, err)
+
 	remoteStorage.SetMeta("targets", orig)
 
-	_, _, err = client.downloadSigned("targets", int64(len(orig)), origSha256[:])
+	_, _, err = client.downloadSigned("targets", int64(len(orig)), origHashes)
 	assert.IsType(t, ErrChecksumMismatch{}, err)
 }
 
@@ -267,12 +270,14 @@ func TestChecksumMatch(t *testing.T) {
 
 	sampleTargets := data.NewTargets()
 	orig, err := json.Marshal(sampleTargets)
-	origSha256 := sha256.Sum256(orig)
 	assert.NoError(t, err)
 
+	origHashes, err := GetSupportedHashes(orig)
+	assert.NoError(t, err)
+
 	remoteStorage.SetMeta("targets", orig)
 
-	_, _, err = client.downloadSigned("targets", int64(len(orig)), origSha256[:])
+	_, _, err = client.downloadSigned("targets", int64(len(orig)), origHashes)
 	assert.NoError(t, err)
 }
 
@@ -284,13 +289,15 @@ func TestSizeMismatchLong(t *testing.T) {
 
 	sampleTargets := data.NewTargets()
 	orig, err := json.Marshal(sampleTargets)
-	origSha256 := sha256.Sum256(orig)
 	assert.NoError(t, err)
 	l := int64(len(orig))
 
+	origHashes, err := GetSupportedHashes(orig)
+	assert.NoError(t, err)
+
 	remoteStorage.SetMeta("targets", orig)
 
-	_, _, err = client.downloadSigned("targets", l, origSha256[:])
+	_, _, err = client.downloadSigned("targets", l, origHashes)
 	// size just limits the data received, the error is caught
 	// either during checksum verification or during json deserialization
 	assert.IsType(t, ErrChecksumMismatch{}, err)
@@ -304,13 +311,15 @@ func TestSizeMismatchShort(t *testing.T) {
 
 	sampleTargets := data.NewTargets()
 	orig, err := json.Marshal(sampleTargets)
-	origSha256 := sha256.Sum256(orig)
 	assert.NoError(t, err)
 	l := int64(len(orig))
 
+	origHashes, err := GetSupportedHashes(orig)
+	assert.NoError(t, err)
+
 	remoteStorage.SetMeta("targets", orig)
 
-	_, _, err = client.downloadSigned("targets", l, origSha256[:])
+	_, _, err = client.downloadSigned("targets", l, origHashes)
 	// size just limits the data received, the error is caught
 	// either during checksum verification or during json deserialization
 	assert.IsType(t, ErrChecksumMismatch{}, err)
@@ -512,6 +521,7 @@ func TestDownloadTargetsNoChecksum(t *testing.T) {
 	assert.NoError(t, err)
 
 	delete(repo.Snapshot.Signed.Meta["targets"].Hashes, "sha256")
+	delete(repo.Snapshot.Signed.Meta["targets"].Hashes, "sha512")
 
 	err = client.downloadTargets("targets")
 	assert.IsType(t, data.ErrMissingMeta{}, err)
@@ -721,8 +731,6 @@ func TestDownloadSnapshotLarge(t *testing.T) {
 	assert.NoError(t, err)
 }
 
-// TestDownloadSnapshotNoChecksum: It should never be valid to download a
-// snapshot if we don't have a checksum
 func TestDownloadSnapshotNoTimestamp(t *testing.T) {
 	repo, _, err := testutils.EmptyRepo("docker.com/notary")
 	assert.NoError(t, err)
@@ -744,6 +752,8 @@ func TestDownloadSnapshotNoTimestamp(t *testing.T) {
 	assert.IsType(t, tuf.ErrNotLoaded{}, err)
 }
 
+// TestDownloadSnapshotNoChecksum: It should never be valid to download a
+// snapshot if we don't have a checksum
 func TestDownloadSnapshotNoChecksum(t *testing.T) {
 	repo, _, err := testutils.EmptyRepo("docker.com/notary")
 	assert.NoError(t, err)
@@ -760,6 +770,7 @@ func TestDownloadSnapshotNoChecksum(t *testing.T) {
 	assert.NoError(t, err)
 
 	delete(repo.Timestamp.Signed.Meta["snapshot"].Hashes, "sha256")
+	delete(repo.Timestamp.Signed.Meta["snapshot"].Hashes, "sha512")
 
 	err = client.downloadSnapshot()
 	assert.IsType(t, data.ErrMissingMeta{}, err)
@@ -882,3 +893,15 @@ func TestDownloadTimestampLocalTimestampInvalidRemoteTimestamp(t *testing.T) {
 	err = client.downloadTimestamp()
 	assert.NoError(t, err)
 }
+
+// GetSupportedHashes is a helper function that returns
+// the checksums of all the supported hash algorithms
+// of the given payload.
+func GetSupportedHashes(payload []byte) (data.Hashes, error) {
+	meta, err := data.NewFileMeta(bytes.NewReader(payload), data.NotaryDefaultHashes...)
+	if err != nil {
+		return nil, err
+	}
+
+	return meta.Hashes, nil
+}

@@ -2,12 +2,12 @@ package data
 
 import (
 	"bytes"
-	"crypto/sha256"
 	"fmt"
 	"time"
 
 	"github.com/Sirupsen/logrus"
 	"github.com/docker/go/canonical/json"
+	"github.com/docker/notary"
 )
 
 // SignedSnapshot is a fully unpacked snapshot.json
@@ -39,10 +39,18 @@ func isValidSnapshotStructure(s Snapshot) error {
 		// Meta is a map of FileMeta, so if the role isn't in the map it returns
 		// an empty FileMeta, which has an empty map, and you can check on keys
 		// from an empty map.
-		if checksum, ok := s.Meta[role].Hashes["sha256"]; !ok || len(checksum) != sha256.Size {
+		//
+		// For now sha256 is required and sha512 is not.
+		if _, ok := s.Meta[role].Hashes[notary.SHA256]; !ok {
 			return ErrInvalidMetadata{
 				role: CanonicalSnapshotRole,
-				msg:  fmt.Sprintf("missing or invalid %s sha256 checksum information", role),
+				msg:  fmt.Sprintf("missing %s sha256 checksum information", role),
 			}
 		}
+		if err := CheckValidHashStructures(s.Meta[role].Hashes); err != nil {
+			return ErrInvalidMetadata{
+				role: CanonicalSnapshotRole,
+				msg:  fmt.Sprintf("invalid %s checksum information, %v", role, err),
+			}
+		}
 	}
@@ -63,11 +71,11 @@ func NewSnapshot(root *Signed, targets *Signed) (*SignedSnapshot, error) {
 		logrus.Debug("Error Marshalling Root")
 		return nil, err
 	}
-	rootMeta, err := NewFileMeta(bytes.NewReader(rootJSON), "sha256")
+	rootMeta, err := NewFileMeta(bytes.NewReader(rootJSON), NotaryDefaultHashes...)
 	if err != nil {
 		return nil, err
 	}
-	targetsMeta, err := NewFileMeta(bytes.NewReader(targetsJSON), "sha256")
+	targetsMeta, err := NewFileMeta(bytes.NewReader(targetsJSON), NotaryDefaultHashes...)
 	if err != nil {
 		return nil, err
 	}

@@ -2,11 +2,11 @@ package data
 
 import (
 	"bytes"
-	"crypto/sha256"
 	"fmt"
 	"time"
 
 	"github.com/docker/go/canonical/json"
+	"github.com/docker/notary"
 )
 
 // SignedTimestamp is a fully unpacked timestamp.json
@@ -37,10 +37,17 @@ func isValidTimestampStructure(t Timestamp) error {
 	// Meta is a map of FileMeta, so if the role isn't in the map it returns
 	// an empty FileMeta, which has an empty map, and you can check on keys
 	// from an empty map.
-	if cs, ok := t.Meta[CanonicalSnapshotRole].Hashes["sha256"]; !ok || len(cs) != sha256.Size {
+	//
+	// For now sha256 is required and sha512 is not.
+	if _, ok := t.Meta[CanonicalSnapshotRole].Hashes[notary.SHA256]; !ok {
 		return ErrInvalidMetadata{
-			role: CanonicalTimestampRole, msg: "missing or invalid snapshot sha256 checksum information"}
+			role: CanonicalTimestampRole, msg: "missing snapshot sha256 checksum information"}
 	}
+	if err := CheckValidHashStructures(t.Meta[CanonicalSnapshotRole].Hashes); err != nil {
+		return ErrInvalidMetadata{
+			role: CanonicalTimestampRole, msg: fmt.Sprintf("invalid snapshot checksum information, %v", err)}
+	}
 
 	return nil
 }
@@ -50,7 +57,7 @@ func NewTimestamp(snapshot *Signed) (*SignedTimestamp, error) {
 	if err != nil {
 		return nil, err
 	}
-	snapshotMeta, err := NewFileMeta(bytes.NewReader(snapshotJSON), "sha256")
+	snapshotMeta, err := NewFileMeta(bytes.NewReader(snapshotJSON), NotaryDefaultHashes...)
 	if err != nil {
 		return nil, err
 	}

@@ -3,6 +3,7 @@ package data
 import (
 	"crypto/sha256"
 	"crypto/sha512"
+	"crypto/subtle"
 	"fmt"
 	"hash"
 	"io"
@@ -119,6 +120,9 @@ type Files map[string]FileMeta
 // and target file
 type Hashes map[string][]byte
 
+// NotaryDefaultHashes contains the default supported hash algorithms.
+var NotaryDefaultHashes = []string{notary.SHA256, notary.SHA512}
+
 // FileMeta contains the size and hashes for a metadata or target file. Custom
 // data can be optionally added.
 type FileMeta struct {
@@ -127,6 +131,62 @@ type FileMeta struct {
 	Custom json.RawMessage `json:"custom,omitempty"`
 }
 
+// CheckHashes verifies all the checksums specified by the "hashes" of the payload.
+func CheckHashes(payload []byte, hashes Hashes) error {
+	cnt := 0
+
+	// k, v indicate the hash algorithm and the corresponding value
+	for k, v := range hashes {
+		switch k {
+		case notary.SHA256:
+			checksum := sha256.Sum256(payload)
+			if subtle.ConstantTimeCompare(checksum[:], v) == 0 {
+				return fmt.Errorf("%s checksum mismatched", k)
+			}
+			cnt++
+		case notary.SHA512:
+			checksum := sha512.Sum512(payload)
+			if subtle.ConstantTimeCompare(checksum[:], v) == 0 {
+				return fmt.Errorf("%s checksum mismatched", k)
+			}
+			cnt++
+		}
+	}
+
+	if cnt == 0 {
+		return fmt.Errorf("at least one supported hash needed")
+	}
+
+	return nil
+}
+
+// CheckValidHashStructures returns an error, or nil, depending on whether
+// the content of the hashes is valid or not.
+func CheckValidHashStructures(hashes Hashes) error {
+	cnt := 0
+
+	for k, v := range hashes {
+		switch k {
+		case notary.SHA256:
+			if len(v) != sha256.Size {
+				return fmt.Errorf("invalid %s checksum", notary.SHA256)
+			}
+			cnt++
+		case notary.SHA512:
+			if len(v) != sha512.Size {
+				return fmt.Errorf("invalid %s checksum", notary.SHA512)
+			}
+			cnt++
+		}
+	}
+
+	if cnt == 0 {
+		return fmt.Errorf("at least one supported hash needed")
+	}
+
+	return nil
+}
+
 // NewFileMeta generates a FileMeta object from the reader, using the
 // hash algorithms provided
 func NewFileMeta(r io.Reader, hashAlgorithms ...string) (FileMeta, error) {
@@ -137,12 +197,12 @@ func NewFileMeta(r io.Reader, hashAlgorithms ...string) (FileMeta, error) {
 	for _, hashAlgorithm := range hashAlgorithms {
 		var h hash.Hash
 		switch hashAlgorithm {
-		case "sha256":
+		case notary.SHA256:
 			h = sha256.New()
-		case "sha512":
+		case notary.SHA512:
 			h = sha512.New()
 		default:
-			return FileMeta{}, fmt.Errorf("Unknown Hash Algorithm: %s", hashAlgorithm)
+			return FileMeta{}, fmt.Errorf("Unknown hash algorithm: %s", hashAlgorithm)
 		}
 		hashes[hashAlgorithm] = h
 		r = io.TeeReader(r, h)

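Taken together, these hunks define the new multi-hash surface: NewFileMeta can record several digests at once via NotaryDefaultHashes, CheckValidHashStructures checks that recorded digests have the right lengths, and CheckHashes verifies a payload against them. A minimal round-trip sketch, assuming the package is importable as github.com/docker/notary/tuf/data at this commit:

package main

import (
	"bytes"
	"fmt"

	"github.com/docker/notary/tuf/data"
)

func main() {
	payload := []byte(`{"signed": {}}`)

	// Record sha256 and sha512 checksums for the payload, as snapshot and
	// timestamp creation now do via NotaryDefaultHashes.
	meta, err := data.NewFileMeta(bytes.NewReader(payload), data.NotaryDefaultHashes...)
	if err != nil {
		fmt.Println("could not generate file meta:", err)
		return
	}

	// Structural validation: every supported entry must have the right length.
	if err := data.CheckValidHashStructures(meta.Hashes); err != nil {
		fmt.Println("bad hash structure:", err)
		return
	}

	// Content validation: at least one supported hash must match the payload.
	if err := data.CheckHashes(payload, meta.Hashes); err != nil {
		fmt.Println("checksum mismatch:", err)
		return
	}
	fmt.Println("payload matches its recorded hashes")
}

Tampering with the payload after the FileMeta is generated would make the final CheckHashes call fail with a "checksum mismatched" error, which is the behavior the client and server code above now relies on.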
@@ -53,3 +53,111 @@ func TestSignatureUnmarshalJSON(t *testing.T) {
 	// Check that the method string is lowercased
 	assert.Equal(t, sig.Method.String(), "rsa")
 }
+
+func TestCheckHashes(t *testing.T) {
+	var err error
+	raw := []byte("Bumblebee")
+
+	// Since only provide an un-supported hash algorithm here,
+	// it should be considered as fail.
+	unSupported := make(Hashes)
+	unSupported["Arthas"] = []byte("is past away.")
+	err = CheckHashes(raw, unSupported)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "at least one supported hash needed")
+
+	// Expected to fail since there is no checksum at all.
+	hashes := make(Hashes)
+	err = CheckHashes(raw, hashes)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "at least one supported hash needed")
+
+	// The most standard one.
+	hashes["sha256"], err = hex.DecodeString("d13e2b60d74c2e6f4f449b5e536814edf9a4827f5a9f4f957fc92e77609b9c92")
+	assert.NoError(t, err)
+	hashes["sha512"], err = hex.DecodeString("f2330f50d0f3ee56cf0d7f66aad8205e0cb9972c323208ffaa914ef7b3c240ae4774b5bbd1db2ce226ee967cfa9058173a853944f9b44e2e08abca385e2b7ed4")
+	assert.NoError(t, err)
+	err = CheckHashes(raw, hashes)
+	assert.NoError(t, err)
+
+	// Expected as success since there are already supported hash here,
+	// just ignore the unsupported one.
+	hashes["Saar"] = []byte("survives again in CTM.")
+	err = CheckHashes(raw, hashes)
+	assert.NoError(t, err)
+
+	only256 := make(Hashes)
+	only256["sha256"], err = hex.DecodeString("d13e2b60d74c2e6f4f449b5e536814edf9a4827f5a9f4f957fc92e77609b9c92")
+	assert.NoError(t, err)
+	err = CheckHashes(raw, only256)
+	assert.NoError(t, err)
+
+	only512 := make(Hashes)
+	only512["sha512"], err = hex.DecodeString("f2330f50d0f3ee56cf0d7f66aad8205e0cb9972c323208ffaa914ef7b3c240ae4774b5bbd1db2ce226ee967cfa9058173a853944f9b44e2e08abca385e2b7ed4")
+	assert.NoError(t, err)
+	err = CheckHashes(raw, only512)
+	assert.NoError(t, err)
+
+	// Expected to fail due to the failure of sha256
+	malicious256 := make(Hashes)
+	malicious256["sha256"] = []byte("malicious data")
+	err = CheckHashes(raw, malicious256)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "checksum mismatched")
+
+	// Expected to fail due to the failure of sha512
+	malicious512 := make(Hashes)
+	malicious512["sha512"] = []byte("malicious data")
+	err = CheckHashes(raw, malicious512)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "checksum mismatched")
+
+	// Expected to fail because of the failure of sha512
+	// even though the sha256 is OK.
+	doubleFace := make(Hashes)
+	doubleFace["sha256"], err = hex.DecodeString("d13e2b60d74c2e6f4f449b5e536814edf9a4827f5a9f4f957fc92e77609b9c92")
+	assert.NoError(t, err)
+	doubleFace["sha512"], err = hex.DecodeString("d13e2b60d74c2e6f4f449b5e536814edf9a4827f5a9f4f957fc92e77609b9c92")
+	assert.NoError(t, err)
+	err = CheckHashes(raw, doubleFace)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "checksum mismatched")
+}
+
+func TestCheckValidHashStructures(t *testing.T) {
+	var err error
+	hashes := make(Hashes)
+
+	// Expected to fail since there is no checksum at all.
+	err = CheckValidHashStructures(hashes)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "at least one supported hash needed")
+
+	// Expected to fail even though the checksum of sha384 is valid,
+	// because we haven't provided a supported hash algorithm yet (ex: sha256).
+	hashes["sha384"], err = hex.DecodeString("64becc3c23843942b1040ffd4743d1368d988ddf046d17d448a6e199c02c3044b425a680112b399d4dbe9b35b7ccc989")
+	err = CheckValidHashStructures(hashes)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "at least one supported hash needed")
+
+	hashes["sha256"], err = hex.DecodeString("766af0ef090a4f2307e49160fa242db6fb95f071ad81a198eeb7d770e61cd6d8")
+	assert.NoError(t, err)
+	err = CheckValidHashStructures(hashes)
+	assert.NoError(t, err)
+
+	hashes["sha512"], err = hex.DecodeString("795d9e95db099464b6730844f28effddb010b0d5abae5d5892a6ee04deacb09c9e622f89e816458b5a1a81761278d7d3a6a7c269d9707eff8858b16c51de0315")
+	assert.NoError(t, err)
+	err = CheckValidHashStructures(hashes)
+	assert.NoError(t, err)
+
+	// Also should be succeed since only check the length of the checksum.
+	hashes["sha256"], err = hex.DecodeString("01234567890a4f2307e49160fa242db6fb95f071ad81a198eeb7d770e61cd6d8")
+	err = CheckValidHashStructures(hashes)
+	assert.NoError(t, err)
+
+	// Should failed since the first '0' is missing.
+	hashes["sha256"], err = hex.DecodeString("1234567890a4f2307e49160fa242db6fb95f071ad81a198eeb7d770e61cd6d8")
+	err = CheckValidHashStructures(hashes)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "invalid")
+}

@@ -530,7 +530,7 @@ func (m *MetadataSwizzler) UpdateSnapshotHashes(roles ...string) error {
 		return err
 	}
 
-	meta, err := data.NewFileMeta(bytes.NewReader(metaBytes), "sha256")
+	meta, err := data.NewFileMeta(bytes.NewReader(metaBytes), data.NotaryDefaultHashes...)
 	if err != nil {
 		return err
 	}
@@ -575,7 +575,7 @@ func (m *MetadataSwizzler) UpdateTimestampHash() error {
 		return err
 	}
 
-	snapshotMeta, err := data.NewFileMeta(bytes.NewReader(metaBytes), "sha256")
+	snapshotMeta, err := data.NewFileMeta(bytes.NewReader(metaBytes), data.NotaryDefaultHashes...)
 	if err != nil {
 		return err
 	}

@@ -760,7 +760,7 @@ func (tr *Repo) UpdateSnapshot(role string, s *data.Signed) error {
 	if err != nil {
 		return err
 	}
-	meta, err := data.NewFileMeta(bytes.NewReader(jsonData), "sha256")
+	meta, err := data.NewFileMeta(bytes.NewReader(jsonData), data.NotaryDefaultHashes...)
 	if err != nil {
 		return err
 	}
@@ -775,7 +775,7 @@ func (tr *Repo) UpdateTimestamp(s *data.Signed) error {
 	if err != nil {
 		return err
 	}
-	meta, err := data.NewFileMeta(bytes.NewReader(jsonData), "sha256")
+	meta, err := data.NewFileMeta(bytes.NewReader(jsonData), data.NotaryDefaultHashes...)
 	if err != nil {
 		return err
 	}