client side of consistent downloads

Signed-off-by: David Lawrence <david.lawrence@docker.com> (github: endophage)
David Lawrence 2016-01-28 16:20:02 -08:00
parent 3eac9a8185
commit 637a2331d4
9 changed files with 195 additions and 94 deletions


@@ -9,6 +9,7 @@ import (
"net/url"
"os"
"path/filepath"
"strings"
"time"
"github.com/Sirupsen/logrus"
@@ -885,7 +886,10 @@ func (r *NotaryRepository) Update(forWrite bool) (*tufclient.Client, error) {
}
err = c.Update()
if err != nil {
if notFound, ok := err.(store.ErrMetaNotFound); ok && notFound.Resource == data.CanonicalRootRole {
// notFound.Resource may include a checksum, so when the role is root
// it will be root.json or root.<checksum>.json. Therefore the best we
// can do is match a "root." prefix
if notFound, ok := err.(store.ErrMetaNotFound); ok && strings.HasPrefix(notFound.Resource, data.CanonicalRootRole+".") {
return nil, r.errRepositoryNotExist()
}
return nil, err
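A minimal standalone sketch of the prefix match (with a local constant standing in for data.CanonicalRootRole and a placeholder checksum):

package main

import (
	"fmt"
	"strings"
)

func main() {
	const canonicalRootRole = "root" // stand-in for data.CanonicalRootRole
	// With consistent downloads the missing resource can take either form.
	for _, resource := range []string{"root.json", "root.abc123.json", "timestamp.json"} {
		fmt.Println(resource, "->", strings.HasPrefix(resource, canonicalRootRole+".")) // true, true, false
	}
}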


@@ -3,6 +3,8 @@ package client
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"encoding/hex"
regJson "encoding/json"
"fmt"
"io/ioutil"
@@ -1062,41 +1064,76 @@ func fakeServerData(t *testing.T, repo *NotaryRepository, mux *http.ServeMux,
data.DefaultExpires("timestamp"))
assert.NoError(t, err)
timestampJSON, _ := json.Marshal(signedTimestamp)
snapshotJSON, _ := json.Marshal(signedSnapshot)
targetsJSON, _ := json.Marshal(signedTargets)
level1JSON, _ := json.Marshal(signedLevel1)
level2JSON, _ := json.Marshal(signedLevel2)
cksmBytes := sha256.Sum256(rootFileBytes)
rootChecksum := hex.EncodeToString(cksmBytes[:])
cksmBytes = sha256.Sum256(snapshotJSON)
snapshotChecksum := hex.EncodeToString(cksmBytes[:])
cksmBytes = sha256.Sum256(targetsJSON)
targetsChecksum := hex.EncodeToString(cksmBytes[:])
cksmBytes = sha256.Sum256(level1JSON)
level1Checksum := hex.EncodeToString(cksmBytes[:])
cksmBytes = sha256.Sum256(level2JSON)
level2Checksum := hex.EncodeToString(cksmBytes[:])
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/root.json",
func(w http.ResponseWriter, r *http.Request) {
assert.NoError(t, err)
fmt.Fprint(w, string(rootFileBytes))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/root."+rootChecksum+".json",
func(w http.ResponseWriter, r *http.Request) {
assert.NoError(t, err)
fmt.Fprint(w, string(rootFileBytes))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/timestamp.json",
func(w http.ResponseWriter, r *http.Request) {
timestampJSON, _ := json.Marshal(signedTimestamp)
fmt.Fprint(w, string(timestampJSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/snapshot.json",
func(w http.ResponseWriter, r *http.Request) {
snapshotJSON, _ := json.Marshal(signedSnapshot)
fmt.Fprint(w, string(snapshotJSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/snapshot."+snapshotChecksum+".json",
func(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, string(snapshotJSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/targets.json",
func(w http.ResponseWriter, r *http.Request) {
targetsJSON, _ := json.Marshal(signedTargets)
fmt.Fprint(w, string(targetsJSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/targets."+targetsChecksum+".json",
func(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, string(targetsJSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/targets/level1.json",
func(w http.ResponseWriter, r *http.Request) {
level1JSON, err := json.Marshal(signedLevel1)
assert.NoError(t, err)
fmt.Fprint(w, string(level1JSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/targets/level1."+level1Checksum+".json",
func(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, string(level1JSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/targets/level2.json",
func(w http.ResponseWriter, r *http.Request) {
level2JSON, err := json.Marshal(signedLevel2)
assert.NoError(t, err)
fmt.Fprint(w, string(level2JSON))
})
mux.HandleFunc("/v2/docker.com/notary/_trust/tuf/targets/level2."+level2Checksum+".json",
func(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, string(level2JSON))
})
}
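For reference, a standalone sketch (placeholder metadata bytes) of how the checksummed URLs registered above are derived:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	snapshotJSON := []byte(`{"signed":{}}`) // placeholder for the marshalled snapshot
	sum := sha256.Sum256(snapshotJSON)
	checksum := hex.EncodeToString(sum[:])
	// The test server above answers on both the plain and the consistent path.
	fmt.Println("/v2/docker.com/notary/_trust/tuf/snapshot.json")
	fmt.Println("/v2/docker.com/notary/_trust/tuf/snapshot." + checksum + ".json")
}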


@@ -9,6 +9,7 @@ import (
"net/http/httptest"
"os"
"reflect"
"strings"
"testing"
"time"
@@ -61,17 +62,18 @@ func bumpVersions(t *testing.T, s *testutils.MetadataSwizzler, offset int) {
// create a server that just serves static metadata files from a metaStore
func readOnlyServer(t *testing.T, cache store.MetadataStore, notFoundStatus int) *httptest.Server {
m := mux.NewRouter()
m.HandleFunc("/v2/docker.com/notary/_trust/tuf/{role:.*}.json",
func(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
metaBytes, err := cache.GetMeta(vars["role"], -1)
if _, ok := err.(store.ErrMetaNotFound); ok {
w.WriteHeader(notFoundStatus)
} else {
require.NoError(t, err)
w.Write(metaBytes)
}
})
handler := func(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
metaBytes, err := cache.GetMeta(vars["role"], -1)
if _, ok := err.(store.ErrMetaNotFound); ok {
w.WriteHeader(notFoundStatus)
} else {
require.NoError(t, err)
w.Write(metaBytes)
}
}
m.HandleFunc("/v2/docker.com/notary/_trust/tuf/{role:.*}.{checksum:.*}.json", handler)
m.HandleFunc("/v2/docker.com/notary/_trust/tuf/{role:.*}.json", handler)
return httptest.NewServer(m)
}
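A standalone sketch (hypothetical /tuf/ prefix, placeholder checksum) of why the checksummed pattern is registered first: gorilla/mux tries routes in registration order, and the plain pattern would otherwise also match "root.<checksum>.json", folding the checksum into the role variable:

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/http/httptest"

	"github.com/gorilla/mux"
)

func main() {
	handler := func(w http.ResponseWriter, r *http.Request) {
		v := mux.Vars(r)
		fmt.Fprintf(w, "role=%q checksum=%q", v["role"], v["checksum"])
	}
	m := mux.NewRouter()
	m.HandleFunc("/tuf/{role:.*}.{checksum:.*}.json", handler) // checksummed route first
	m.HandleFunc("/tuf/{role:.*}.json", handler)

	srv := httptest.NewServer(m)
	defer srv.Close()
	for _, p := range []string{"/tuf/root.json", "/tuf/root.abc123.json"} {
		resp, _ := http.Get(srv.URL + p)
		body, _ := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Println(p, "->", string(body)) // role="root" checksum="" / role="root" checksum="abc123"
	}
}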
@@ -632,7 +634,7 @@ func testUpdateRemoteNon200Error(t *testing.T, opts updateOpts, errExpected inte
require.IsType(t, errExpected, err, "wrong update error when %s is %v (forWrite: %v)",
opts.role, opts.notFoundCode, opts.forWrite)
if notFound, ok := err.(store.ErrMetaNotFound); ok {
require.Equal(t, opts.role, notFound.Resource, "wrong resource missing (forWrite: %v)", opts.forWrite)
require.True(t, strings.HasPrefix(notFound.Resource, opts.role), "wrong resource missing (forWrite: %v)", opts.forWrite)
}
}
}


@@ -89,14 +89,14 @@ func RootHandler(ac auth.AccessController, ctx context.Context, trust signed.Cry
prometheus.InstrumentHandlerWithOpts(
prometheusOpts("UpdateTuf"),
hand(handlers.AtomicUpdateHandler, "push", "pull")))
r.Methods("GET").Path("/v2/{imageName:.*}/_trust/tuf/{tufRole:root|targets(?:/[^/\\s]+)*|snapshot|timestamp}.json").Handler(
prometheus.InstrumentHandlerWithOpts(
prometheusOpts("GetRole"),
hand(handlers.GetHandler, "pull")))
r.Methods("GET").Path("/v2/{imageName:.*}/_trust/tuf/{tufRole:root|targets(?:/[^/\\s]+)*|snapshot|timestamp}.{checksum:[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128}}.json").Handler(
prometheus.InstrumentHandlerWithOpts(
prometheusOpts("GetRoleByHash"),
hand(handlers.GetHandler, "pull")))
r.Methods("GET").Path("/v2/{imageName:.*}/_trust/tuf/{tufRole:root|targets(?:/[^/\\s]+)*|snapshot|timestamp}.json").Handler(
prometheus.InstrumentHandlerWithOpts(
prometheusOpts("GetRole"),
hand(handlers.GetHandler, "pull")))
r.Methods("GET").Path(
"/v2/{imageName:.*}/_trust/tuf/{tufRole:snapshot|timestamp}.key").Handler(
prometheus.InstrumentHandlerWithOpts(


@@ -7,8 +7,6 @@ import (
"encoding/json"
"fmt"
"io"
"path"
"strings"
"github.com/Sirupsen/logrus"
"github.com/docker/notary"
@@ -179,7 +177,8 @@ func (c *Client) downloadRoot() error {
var s *data.Signed
var raw []byte
if download {
raw, s, err = c.downloadSigned(role, size, expectedSha256)
// use consistent download if we have the checksum.
raw, s, err = c.downloadSigned(role, size, expectedSha256, len(expectedSha256) > 0)
if err != nil {
return err
}
@@ -267,7 +266,7 @@ func (c *Client) downloadTimestamp() error {
}
// unlike root, targets and snapshot, always try and download timestamps
// from remote, only using the cache one if we couldn't reach remote.
raw, s, err := c.downloadSigned(role, notary.MaxTimestampSize, nil)
raw, s, err := c.downloadSigned(role, notary.MaxTimestampSize, nil, false)
if err == nil {
ts, err = c.verifyTimestamp(s, version, c.keysDB)
if err == nil {
@@ -344,7 +343,7 @@ func (c *Client) downloadSnapshot() error {
}
var s *data.Signed
if download {
raw, s, err = c.downloadSigned(role, size, expectedSha256)
raw, s, err = c.downloadSigned(role, size, expectedSha256, true)
if err != nil {
return err
}
@@ -421,8 +420,9 @@ func (c *Client) downloadTargets(role string) error {
return nil
}
func (c *Client) downloadSigned(role string, size int64, expectedSha256 []byte) ([]byte, *data.Signed, error) {
raw, err := c.remote.GetMeta(role, size)
func (c *Client) downloadSigned(role string, size int64, expectedSha256 []byte, consistent bool) ([]byte, *data.Signed, error) {
rolePath := utils.URLFilePath(role, expectedSha256, consistent)
raw, err := c.remote.GetMeta(rolePath, size)
if err != nil {
return nil, nil, err
}
@@ -481,11 +481,7 @@ func (c Client) getTargetsFile(role string, keyIDs []string, snapshotMeta data.F
size := snapshotMeta[role].Length
var s *data.Signed
if download {
rolePath, err := c.RoleTargetsPath(role, hex.EncodeToString(expectedSha256), consistent)
if err != nil {
return nil, err
}
raw, s, err = c.downloadSigned(rolePath, size, expectedSha256)
raw, s, err = c.downloadSigned(role, size, expectedSha256, true)
if err != nil {
return nil, err
}
@@ -509,24 +505,6 @@ func (c Client) getTargetsFile(role string, keyIDs []string, snapshotMeta data.F
return s, nil
}
// RoleTargetsPath generates the appropriate HTTP URL for the targets file,
// based on whether the repo is marked as consistent.
func (c Client) RoleTargetsPath(role string, hashSha256 string, consistent bool) (string, error) {
if consistent {
// Use path instead of filepath since we refer to the TUF role directly instead of its target files
dir := path.Dir(role)
if strings.Contains(role, "/") {
lastSlashIdx := strings.LastIndex(role, "/")
role = role[lastSlashIdx+1:]
}
role = path.Join(
dir,
fmt.Sprintf("%s.%s.json", hashSha256, role),
)
}
return role, nil
}
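For comparison, a standalone sketch of the removed scheme versus the new one (delegated role "targets/level1", placeholder checksum "abc123"):

package main

import (
	"fmt"
	"path"
)

func main() {
	role, hash := "targets/level1", "abc123" // placeholder checksum
	// Removed RoleTargetsPath scheme: hash before the role's file name, extension included.
	oldPath := path.Join(path.Dir(role), fmt.Sprintf("%s.%s.json", hash, path.Base(role)))
	// New utils.URLFilePath scheme: hash after the full role; the RemoteStore adds ".json".
	newPath := fmt.Sprintf("%s.%s", role, hash)
	fmt.Println(oldPath) // targets/abc123.level1.json
	fmt.Println(newPath) // targets/level1.abc123
}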
// TargetMeta ensures the repo is up to date. It assumes downloadTargets
// has already downloaded all delegated roles
func (c Client) TargetMeta(role, path string, excludeRoles ...string) (*data.FileMeta, string) {


@@ -235,18 +235,17 @@ func TestCheckRootExpired(t *testing.T) {
func TestChecksumMismatch(t *testing.T) {
repo := tuf.NewRepo(nil, nil)
localStorage := store.NewMemoryStore(nil, nil)
remoteStorage := store.NewMemoryStore(nil, nil)
remoteStorage := testutils.NewCorruptingMemoryStore(nil, nil)
client := NewClient(repo, remoteStorage, nil, localStorage)
sampleTargets := data.NewTargets()
orig, err := json.Marshal(sampleTargets)
origSha256 := sha256.Sum256(orig)
orig[0] = '}' // corrupt data, should be a {
assert.NoError(t, err)
remoteStorage.SetMeta("targets", orig)
_, _, err = client.downloadSigned("targets", int64(len(orig)), origSha256[:])
_, _, err = client.downloadSigned("targets", int64(len(orig)), origSha256[:], false)
assert.IsType(t, ErrChecksumMismatch{}, err)
}
@@ -263,7 +262,7 @@ func TestChecksumMatch(t *testing.T) {
remoteStorage.SetMeta("targets", orig)
_, _, err = client.downloadSigned("targets", int64(len(orig)), origSha256[:])
_, _, err = client.downloadSigned("targets", int64(len(orig)), origSha256[:], false)
assert.NoError(t, err)
}
@@ -284,7 +283,7 @@ func TestSizeMismatchLong(t *testing.T) {
remoteStorage.SetMeta("targets", orig)
_, _, err = client.downloadSigned("targets", l, origSha256[:])
_, _, err = client.downloadSigned("targets", l, origSha256[:], false)
// size just limits the data received, the error is caught
// either during checksum verification or during json deserialization
assert.IsType(t, ErrChecksumMismatch{}, err)
@@ -306,7 +305,7 @@ func TestSizeMismatchShort(t *testing.T) {
remoteStorage.SetMeta("targets", orig)
_, _, err = client.downloadSigned("targets", l, origSha256[:])
_, _, err = client.downloadSigned("targets", l, origSha256[:], false)
// size just limits the data received, the error is caught
// either during checksum verification or during json deserialization
assert.IsType(t, ErrChecksumMismatch{}, err)
@@ -457,7 +456,7 @@ func TestDownloadTargetChecksumMismatch(t *testing.T) {
kdb, repo, _, err := testutils.EmptyRepo("docker.com/notary")
assert.NoError(t, err)
localStorage := store.NewMemoryStore(nil, nil)
remoteStorage := store.NewMemoryStore(nil, nil)
remoteStorage := testutils.NewCorruptingMemoryStore(nil, nil)
client := NewClient(repo, remoteStorage, kdb, localStorage)
// create and "upload" sample targets
@@ -466,13 +465,10 @@ func TestDownloadTargetChecksumMismatch(t *testing.T) {
orig, err := json.Marshal(signedOrig)
assert.NoError(t, err)
origSha256 := sha256.Sum256(orig)
orig[0] = '}' // corrupt data, should be a {
err = remoteStorage.SetMeta("targets", orig)
assert.NoError(t, err)
// create local snapshot with targets file
// It's necessary to do it this way rather than calling repo.SignSnapshot
// so that we have the wrong sha256 in the snapshot.
snap := data.SignedSnapshot{
Signed: data.Snapshot{
Meta: data.Files{
@@ -583,28 +579,52 @@ func TestUpdateDownloadRootHappy(t *testing.T) {
}
func TestUpdateDownloadRootBadChecksum(t *testing.T) {
remoteStore := testutils.NewCorruptingMemoryStore(nil, nil)
kdb, repo, _, err := testutils.EmptyRepo("docker.com/notary")
assert.NoError(t, err)
localStorage := store.NewMemoryStore(nil, nil)
remoteStorage := store.NewMemoryStore(nil, nil)
client := NewClient(repo, remoteStorage, kdb, localStorage)
client := NewClient(repo, remoteStore, kdb, localStorage)
// sign snapshot to make sure we have a checksum for root
_, err = repo.SignSnapshot(data.DefaultExpires("snapshot"))
assert.NoError(t, err)
// create and "upload" sample root, snapshot, and timestamp
// sign and "upload" sample root
signedOrig, err := repo.SignRoot(data.DefaultExpires("root"))
assert.NoError(t, err)
orig, err := json.Marshal(signedOrig)
assert.NoError(t, err)
err = remoteStorage.SetMeta("root", orig)
err = remoteStore.SetMeta("root", orig)
assert.NoError(t, err)
// sign snapshot to make sure we have current checksum for root
_, err = repo.SignSnapshot(data.DefaultExpires("snapshot"))
assert.NoError(t, err)
err = client.downloadRoot()
assert.IsType(t, ErrChecksumMismatch{}, err)
}
func TestUpdateDownloadRootChecksumNotFound(t *testing.T) {
remoteStore := store.NewMemoryStore(nil, nil)
kdb, repo, _, err := testutils.EmptyRepo("docker.com/notary")
assert.NoError(t, err)
localStorage := store.NewMemoryStore(nil, nil)
client := NewClient(repo, remoteStore, kdb, localStorage)
// sign snapshot to make sure we have current checksum for root
_, err = repo.SignSnapshot(data.DefaultExpires("snapshot"))
assert.NoError(t, err)
// sign and "upload" sample root
signedOrig, err := repo.SignRoot(data.DefaultExpires("root"))
assert.NoError(t, err)
orig, err := json.Marshal(signedOrig)
assert.NoError(t, err)
err = remoteStore.SetMeta("root", orig)
assert.NoError(t, err)
// don't sign snapshot again to ensure checksum is out of date (bad)
err = client.downloadRoot()
assert.IsType(t, ErrChecksumMismatch{}, err)
assert.IsType(t, store.ErrMetaNotFound{}, err)
}
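A standalone sketch (placeholder JSON) of why a stale snapshot checksum now surfaces as ErrMetaNotFound rather than a checksum mismatch: the store only indexes the consistent path of the bytes actually uploaded (as the MemoryStore changes below do), so a lookup keyed by the stale checksum simply misses:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	oldRoot := []byte(`{"signed":{"version":1}}`) // placeholder: bytes the snapshot recorded a checksum for
	newRoot := []byte(`{"signed":{"version":2}}`) // placeholder: bytes actually uploaded

	uploaded := sha256.Sum256(newRoot)
	consistent := map[string][]byte{
		"root." + hex.EncodeToString(uploaded[:]): newRoot,
	}

	stale := sha256.Sum256(oldRoot)
	_, ok := consistent["root."+hex.EncodeToString(stale[:])]
	fmt.Println("found:", ok) // false -> store.ErrMetaNotFound
}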
func TestDownloadTimestampHappy(t *testing.T) {
@@ -739,7 +759,7 @@ func TestDownloadSnapshotNoChecksum(t *testing.T) {
assert.IsType(t, ErrMissingMeta{}, err)
}
func TestDownloadSnapshotBadChecksum(t *testing.T) {
func TestDownloadSnapshotChecksumNotFound(t *testing.T) {
kdb, repo, _, err := testutils.EmptyRepo("docker.com/notary")
assert.NoError(t, err)
localStorage := store.NewMemoryStore(nil, nil)
@@ -761,7 +781,7 @@ func TestDownloadSnapshotBadChecksum(t *testing.T) {
// by not signing timestamp again we ensure it has the wrong checksum
err = client.downloadSnapshot()
assert.IsType(t, ErrChecksumMismatch{}, err)
assert.IsType(t, store.ErrMetaNotFound{}, err)
}
// TargetMeta returns the file metadata for a file path in the role subtree,


@@ -2,6 +2,7 @@ package store
import (
"bytes"
"crypto/sha256"
"fmt"
"io"
@@ -12,28 +13,40 @@ import (
// NewMemoryStore returns a MetadataStore that operates entirely in memory.
// Very useful for testing
func NewMemoryStore(meta map[string][]byte, files map[string][]byte) RemoteStore {
func NewMemoryStore(meta map[string][]byte, files map[string][]byte) *MemoryStore {
var consistent = make(map[string][]byte)
if meta == nil {
meta = make(map[string][]byte)
} else {
// add all seed meta to consistent
for name, data := range meta {
checksum := sha256.Sum256(data)
path := utils.URLFilePath(name, checksum[:], true)
consistent[path] = data
}
}
if files == nil {
files = make(map[string][]byte)
}
return &memoryStore{
meta: meta,
files: files,
keys: make(map[string][]data.PrivateKey),
return &MemoryStore{
meta: meta,
consistent: consistent,
files: files,
keys: make(map[string][]data.PrivateKey),
}
}
type memoryStore struct {
meta map[string][]byte
files map[string][]byte
keys map[string][]data.PrivateKey
// MemoryStore implements a mock RemoteStore entirely in memory.
// For testing purposes only.
type MemoryStore struct {
meta map[string][]byte
consistent map[string][]byte
files map[string][]byte
keys map[string][]data.PrivateKey
}
// If size is -1, this corresponds to "infinite," but we cut off at 100MB
func (m *memoryStore) GetMeta(name string, size int64) ([]byte, error) {
func (m *MemoryStore) GetMeta(name string, size int64) ([]byte, error) {
d, ok := m.meta[name]
if ok {
if size == -1 {
@@ -44,15 +57,26 @@ func (m *memoryStore) GetMeta(name string, size int64) ([]byte, error) {
}
return d[:size], nil
}
d, ok = m.consistent[name]
if ok {
if int64(len(d)) < size {
return d, nil
}
return d[:size], nil
}
return nil, ErrMetaNotFound{Resource: name}
}
func (m *memoryStore) SetMeta(name string, meta []byte) error {
func (m *MemoryStore) SetMeta(name string, meta []byte) error {
m.meta[name] = meta
checksum := sha256.Sum256(meta)
path := utils.URLFilePath(name, checksum[:], true)
m.consistent[path] = meta
return nil
}
func (m *memoryStore) SetMultiMeta(metas map[string][]byte) error {
func (m *MemoryStore) SetMultiMeta(metas map[string][]byte) error {
for role, blob := range metas {
m.SetMeta(role, blob)
}
@@ -61,16 +85,16 @@ func (m *memoryStore) SetMultiMeta(metas map[string][]byte) error {
// RemoveMeta removes the metadata for a single role - if the metadata doesn't
// exist, no error is returned
func (m *memoryStore) RemoveMeta(name string) error {
func (m *MemoryStore) RemoveMeta(name string) error {
delete(m.meta, name)
return nil
}
func (m *memoryStore) GetTarget(path string) (io.ReadCloser, error) {
func (m *MemoryStore) GetTarget(path string) (io.ReadCloser, error) {
return &utils.NoopCloser{Reader: bytes.NewReader(m.files[path])}, nil
}
func (m *memoryStore) WalkStagedTargets(paths []string, targetsFn targetsWalkFunc) error {
func (m *MemoryStore) WalkStagedTargets(paths []string, targetsFn targetsWalkFunc) error {
if len(paths) == 0 {
for path, dat := range m.files {
meta, err := data.NewFileMeta(bytes.NewReader(dat), "sha256")
@@ -100,16 +124,16 @@ func (m *memoryStore) WalkStagedTargets(paths []string, targetsFn targetsWalkFun
return nil
}
func (m *memoryStore) Commit(map[string][]byte, bool, map[string]data.Hashes) error {
func (m *MemoryStore) Commit(map[string][]byte, bool, map[string]data.Hashes) error {
return nil
}
func (m *memoryStore) GetKey(role string) ([]byte, error) {
return nil, fmt.Errorf("GetKey is not implemented for the memoryStore")
func (m *MemoryStore) GetKey(role string) ([]byte, error) {
return nil, fmt.Errorf("GetKey is not implemented for the MemoryStore")
}
// Clear this existing memory store by setting this store as new empty one
func (m *memoryStore) RemoveAll() error {
func (m *MemoryStore) RemoveAll() error {
m.meta = make(map[string][]byte)
m.files = make(map[string][]byte)
m.keys = make(map[string][]data.PrivateKey)


@@ -0,0 +1,24 @@
package testutils
import (
"github.com/docker/notary/tuf/store"
)
// CorruptingMemoryStore corrupts all data returned by GetMeta
type CorruptingMemoryStore struct {
store.MemoryStore
}
func NewCorruptingMemoryStore(meta map[string][]byte, files map[string][]byte) *CorruptingMemoryStore {
s := store.NewMemoryStore(meta, files)
return &CorruptingMemoryStore{MemoryStore: *s}
}
func (cm CorruptingMemoryStore) GetMeta(name string, size int64) ([]byte, error) {
d, err := cm.MemoryStore.GetMeta(name, size)
if err != nil {
return nil, err
}
d[0] = '}' // all our content is JSON so must start with {
return d, err
}
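A standalone sketch (placeholder metadata) of the effect: flipping the first byte changes the sha256, so any download served by this store fails checksum verification:

package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"
)

func main() {
	stored := []byte(`{"signed":{}}`) // placeholder metadata
	want := sha256.Sum256(stored)

	served := append([]byte(nil), stored...)
	served[0] = '}' // what CorruptingMemoryStore does on GetMeta

	got := sha256.Sum256(served)
	fmt.Println("checksum still matches:", bytes.Equal(want[:], got[:])) // false -> ErrChecksumMismatch
}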


@@ -5,6 +5,7 @@ import (
"crypto/sha256"
"crypto/sha512"
"crypto/tls"
"encoding/hex"
"fmt"
"io"
"net/http"
@@ -146,3 +147,14 @@ func FindRoleIndex(rs []*data.Role, name string) int {
}
return -1
}
// URLFilePath generates the appropriate HTTP URL path for the role,
// based on whether the repo is marked as consistent. The RemoteStore
// is responsible for adding file extensions.
func URLFilePath(role string, hashSha256 []byte, consistent bool) string {
if consistent && len(hashSha256) > 0 {
hash := hex.EncodeToString(hashSha256)
return fmt.Sprintf("%s.%s", role, hash)
}
return role
}
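A standalone usage sketch (the helper body mirrors URLFilePath above; placeholder metadata bytes):

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// urlFilePath mirrors utils.URLFilePath above.
func urlFilePath(role string, hashSha256 []byte, consistent bool) string {
	if consistent && len(hashSha256) > 0 {
		return fmt.Sprintf("%s.%s", role, hex.EncodeToString(hashSha256))
	}
	return role
}

func main() {
	meta := []byte(`{"signed":{}}`) // placeholder for marshalled snapshot metadata
	sum := sha256.Sum256(meta)
	fmt.Println(urlFilePath("snapshot", sum[:], true)) // snapshot.<hex sha256>; the store appends ".json"
	fmt.Println(urlFilePath("timestamp", nil, false))  // timestamp
}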