mirror of https://github.com/docker/docs.git
Merge pull request #682 from endophage/store_storage
minor cleanup of filestore initialization
commit 4d85f964bc
@@ -15,14 +15,12 @@ type SimpleFileStore struct {
 	perms   os.FileMode
 }
 
-// NewSimpleFileStore creates a directory with 755 permissions
-func NewSimpleFileStore(baseDir string, fileExt string) (*SimpleFileStore, error) {
+// NewFileStore creates a fully configurable file store
+func NewFileStore(baseDir, fileExt string, perms os.FileMode) (*SimpleFileStore, error) {
 	baseDir = filepath.Clean(baseDir)
-
-	if err := CreateDirectory(baseDir); err != nil {
+	if err := createDirectory(baseDir, perms); err != nil {
 		return nil, err
 	}
-
 	if !strings.HasPrefix(fileExt, ".") {
 		fileExt = "." + fileExt
 	}
@@ -30,25 +28,20 @@ func NewSimpleFileStore(baseDir string, fileExt string) (*SimpleFileStore, error
 	return &SimpleFileStore{
 		baseDir: baseDir,
 		fileExt: fileExt,
-		perms:   visible,
+		perms:   perms,
 	}, nil
 }
 
-// NewPrivateSimpleFileStore creates a directory with 700 permissions
-func NewPrivateSimpleFileStore(baseDir string, fileExt string) (*SimpleFileStore, error) {
-	if err := CreatePrivateDirectory(baseDir); err != nil {
-		return nil, err
-	}
+// NewSimpleFileStore is a convenience wrapper to create a world readable,
+// owner writeable filestore
+func NewSimpleFileStore(baseDir, fileExt string) (*SimpleFileStore, error) {
+	return NewFileStore(baseDir, fileExt, visible)
+}
 
-	if !strings.HasPrefix(fileExt, ".") {
-		fileExt = "." + fileExt
-	}
-
-	return &SimpleFileStore{
-		baseDir: baseDir,
-		fileExt: fileExt,
-		perms:   private,
-	}, nil
+// NewPrivateSimpleFileStore is a wrapper to create an owner readable/writeable
+// _only_ filestore
+func NewPrivateSimpleFileStore(baseDir, fileExt string) (*SimpleFileStore, error) {
+	return NewFileStore(baseDir, fileExt, private)
 }
 
 // Add writes data to a file with a given name
@@ -71,24 +64,6 @@ func (f *SimpleFileStore) Remove(name string) error {
 	return os.Remove(filePath)
 }
 
-// RemoveDir removes the directory identified by name
-func (f *SimpleFileStore) RemoveDir(name string) error {
-	dirPath := filepath.Join(f.baseDir, name)
-
-	// Check to see if directory exists
-	fi, err := os.Stat(dirPath)
-	if err != nil {
-		return err
-	}
-
-	// Check to see if it is a directory
-	if !fi.IsDir() {
-		return fmt.Errorf("directory not found: %s", name)
-	}
-
-	return os.RemoveAll(dirPath)
-}
-
 // Get returns the data given a file name
 func (f *SimpleFileStore) Get(name string) ([]byte, error) {
 	filePath, err := f.GetPath(name)
@@ -119,12 +94,6 @@ func (f *SimpleFileStore) ListFiles() []string {
 	return f.list(f.baseDir)
 }
 
-// ListDir lists all the files inside of a directory identified by a name
-func (f *SimpleFileStore) ListDir(name string) []string {
-	fullPath := filepath.Join(f.baseDir, name)
-	return f.list(fullPath)
-}
-
 // list lists all the files in a directory given a full path. Ignores symlinks.
 func (f *SimpleFileStore) list(path string) []string {
 	files := make([]string, 0, 0)
@@ -170,16 +139,6 @@ func (f *SimpleFileStore) BaseDir() string {
 	return f.baseDir
 }
 
-// CreateDirectory uses createDirectory to create a chmod 755 Directory
-func CreateDirectory(dir string) error {
-	return createDirectory(dir, visible)
-}
-
-// CreatePrivateDirectory uses createDirectory to create a chmod 700 Directory
-func CreatePrivateDirectory(dir string) error {
-	return createDirectory(dir, private)
-}
-
 // createDirectory receives a string of the path to a directory.
 // It does not support passing files, so the caller has to remove
 // the filename by doing filepath.Dir(full_path_to_file)
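After this refactor a single configurable constructor backs two thin wrappers. Below is a minimal usage sketch of the three constructors shown in the hunks above; the import path, directories, and the 0750 permission value are illustrative assumptions, only the function signatures and the 0755/0700 defaults come from the diff.

package main

import (
	"log"
	"os"

	"github.com/docker/notary/trustmanager" // assumed package path for the file above
)

func main() {
	// Same behaviour as before the change: world-readable (0755) directories.
	pub, err := trustmanager.NewSimpleFileStore("/tmp/notary-certs", "crt")
	if err != nil {
		log.Fatal(err)
	}

	// Owner-only (0700) directories.
	priv, err := trustmanager.NewPrivateSimpleFileStore("/tmp/notary-keys", "key")
	if err != nil {
		log.Fatal(err)
	}

	// New in this commit: the caller picks the permissions directly.
	other, err := trustmanager.NewFileStore("/tmp/notary-other", "pem", os.FileMode(0750))
	if err != nil {
		log.Fatal(err)
	}

	log.Println(pub.BaseDir(), priv.BaseDir(), other.BaseDir())
}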
@@ -74,39 +74,6 @@ func TestRemoveFile(t *testing.T) {
 	require.Error(t, err)
 }
 
-func TestRemoveDir(t *testing.T) {
-	testName := "docker.com/diogomonica/"
-	testExt := ".key"
-	perms := os.FileMode(0700)
-
-	// Temporary directory where test files will be created
-	tempBaseDir, err := ioutil.TempDir("", "notary-test-")
-	require.NoError(t, err)
-	defer os.RemoveAll(tempBaseDir)
-
-	// Since we're generating this manually we need to add the extension '.'
-	expectedFilePath := filepath.Join(tempBaseDir, testName+testExt)
-
-	_, err = generateRandomFile(expectedFilePath, perms)
-	require.NoError(t, err)
-
-	// Create our SimpleFileStore
-	store := &SimpleFileStore{
-		baseDir: tempBaseDir,
-		fileExt: testExt,
-		perms:   perms,
-	}
-
-	// Call the RemoveDir function
-	err = store.RemoveDir(testName)
-	require.NoError(t, err)
-
-	expectedDirectory := filepath.Dir(expectedFilePath)
-	// Check to see if file exists
-	_, err = os.Stat(expectedDirectory)
-	require.Error(t, err)
-}
-
 func TestListFiles(t *testing.T) {
 	testName := "docker.com/notary/certificate"
 	testExt := "crt"
@@ -139,42 +106,6 @@ func TestListFiles(t *testing.T) {
 	require.Len(t, files, 10)
 }
 
-func TestListDir(t *testing.T) {
-	testName := "docker.com/notary/certificate"
-	testExt := "crt"
-	perms := os.FileMode(0755)
-
-	// Temporary directory where test files will be created
-	tempBaseDir, err := ioutil.TempDir("", "notary-test-")
-	require.NoError(t, err)
-	defer os.RemoveAll(tempBaseDir)
-
-	var expectedFilePath string
-	// Create 10 randomfiles
-	for i := 1; i <= 10; i++ {
-		// Since we're generating this manually we need to add the extension '.'
-		fileName := fmt.Sprintf("%s-%s.%s", testName, strconv.Itoa(i), testExt)
-		expectedFilePath = filepath.Join(tempBaseDir, fileName)
-		_, err = generateRandomFile(expectedFilePath, perms)
-		require.NoError(t, err)
-	}
-
-	// Create our SimpleFileStore
-	store := &SimpleFileStore{
-		baseDir: tempBaseDir,
-		fileExt: testExt,
-		perms:   perms,
-	}
-
-	// Call the ListDir function
-	files := store.ListDir("docker.com/")
-	require.Len(t, files, 10)
-	files = store.ListDir("docker.com/notary")
-	require.Len(t, files, 10)
-	files = store.ListDir("fakedocker.com/")
-	require.Len(t, files, 0)
-}
-
 func TestGetPath(t *testing.T) {
 	testExt := ".crt"
 	perms := os.FileMode(0755)
@@ -282,7 +213,7 @@ func TestCreateDirectory(t *testing.T) {
 	dirPath := filepath.Join(tempBaseDir, testDir)
 
 	// Call createDirectory
-	CreateDirectory(dirPath)
+	createDirectory(dirPath, visible)
 
 	// Check to see if file exists
 	fi, err := os.Stat(dirPath)
@@ -306,7 +237,7 @@ func TestCreatePrivateDirectory(t *testing.T) {
 	dirPath := filepath.Join(tempBaseDir, testDir)
 
 	// Call createDirectory
-	CreatePrivateDirectory(dirPath)
+	createDirectory(dirPath, private)
 
 	// Check to see if file exists
 	fi, err := os.Stat(dirPath)
@@ -366,7 +297,7 @@ func TestFileStoreConsistency(t *testing.T) {
 	file2Path := "path/file2"
 	file3Path := "long/path/file3"
 
-	for _, s := range []LimitedFileStore{s, s2} {
+	for _, s := range []Storage{s, s2} {
 		s.Add(file1Path, file1Data)
 		s.Add(file2Path, file2Data)
 		s.Add(file3Path, file3Data)
@@ -62,7 +62,7 @@ func NewKeyFileStore(baseDir string, passphraseRetriever passphrase.Retriever) (
 	return keyStore, nil
 }
 
-func generateKeyInfoMap(s LimitedFileStore) map[string]KeyInfo {
+func generateKeyInfoMap(s Storage) map[string]KeyInfo {
 	keyInfoMap := make(map[string]KeyInfo)
 	for _, keyPath := range s.ListFiles() {
 		d, err := s.Get(keyPath)
@@ -309,7 +309,7 @@ func KeyInfoFromPEM(pemBytes []byte, filename string) (string, KeyInfo, error) {
 	return keyID, KeyInfo{Gun: gun, Role: role}, nil
 }
 
-func addKey(s LimitedFileStore, passphraseRetriever passphrase.Retriever, cachedKeys map[string]*cachedKey, name, role string, privKey data.PrivateKey) error {
+func addKey(s Storage, passphraseRetriever passphrase.Retriever, cachedKeys map[string]*cachedKey, name, role string, privKey data.PrivateKey) error {
 
 	var (
 		chosenPassphrase string
@@ -338,7 +338,7 @@ func addKey(s LimitedFileStore, passphraseRetriever passphrase.Retriever, cached
 // both in the newer format PEM headers, and also in the legacy filename
 // format. It returns: the role, whether it was found in the legacy format
 // (true == legacy), and an error
-func getKeyRole(s LimitedFileStore, keyID string) (string, bool, error) {
+func getKeyRole(s Storage, keyID string) (string, bool, error) {
 	name := strings.TrimSpace(strings.TrimSuffix(filepath.Base(keyID), filepath.Ext(keyID)))
 
 	for _, file := range s.ListFiles() {
@@ -365,7 +365,7 @@ func getKeyRole(s LimitedFileStore, keyID string) (string, bool, error) {
 }
 
 // GetKey returns the PrivateKey given a KeyID
-func getKey(s LimitedFileStore, passphraseRetriever passphrase.Retriever, cachedKeys map[string]*cachedKey, name string) (data.PrivateKey, string, error) {
+func getKey(s Storage, passphraseRetriever passphrase.Retriever, cachedKeys map[string]*cachedKey, name string) (data.PrivateKey, string, error) {
 	cachedKeyEntry, ok := cachedKeys[name]
 	if ok {
 		return cachedKeyEntry.key, cachedKeyEntry.alias, nil
@@ -389,7 +389,7 @@ func getKey(s LimitedFileStore, passphraseRetriever passphrase.Retriever, cached
 }
 
 // RemoveKey removes the key from the keyfilestore
-func removeKey(s LimitedFileStore, cachedKeys map[string]*cachedKey, name string) error {
+func removeKey(s Storage, cachedKeys map[string]*cachedKey, name string) error {
 	role, legacy, err := getKeyRole(s, name)
 	if err != nil {
 		return err
@@ -419,7 +419,7 @@ func getSubdir(alias string) string {
 
 // Given a key ID, gets the bytes and alias belonging to that key if the key
 // exists
-func getRawKey(s LimitedFileStore, name string) ([]byte, string, error) {
+func getRawKey(s Storage, name string) ([]byte, string, error) {
 	role, legacy, err := getKeyRole(s, name)
 	if err != nil {
 		return nil, "", err
@@ -475,7 +475,7 @@ func GetPasswdDecryptBytes(passphraseRetriever passphrase.Retriever, pemBytes []
 	return privKey, passwd, nil
 }
 
-func encryptAndAddKey(s LimitedFileStore, passwd string, cachedKeys map[string]*cachedKey, name, role string, privKey data.PrivateKey) error {
+func encryptAndAddKey(s Storage, passwd string, cachedKeys map[string]*cachedKey, name, role string, privKey data.PrivateKey) error {
 
 	var (
 		pemPrivKey []byte
@@ -5,7 +5,7 @@ import (
 	"sync"
 )
 
-// MemoryFileStore is an implementation of LimitedFileStore that keeps
+// MemoryFileStore is an implementation of Storage that keeps
 // the contents in memory.
 type MemoryFileStore struct {
 	sync.Mutex
@@ -17,8 +17,8 @@ var (
 	ErrPathOutsideStore = errors.New("path outside file store")
 )
 
-// LimitedFileStore implements the bare bones primitives (no hierarchy)
-type LimitedFileStore interface {
+// Storage implements the bare bones primitives (no hierarchy)
+type Storage interface {
 	// Add writes a file to the specified location, returning an error if this
 	// is not possible (reasons may include permissions errors). The path is cleaned
 	// before being made absolute against the store's base dir.
@@ -37,16 +37,6 @@ type LimitedFileStore interface {
 
 	// ListFiles returns a list of paths relative to the base directory of the
 	// filestore. Any of these paths must be retrievable via the
-	// LimitedFileStore.Get method.
+	// Storage.Get method.
 	ListFiles() []string
 }
 
-// FileStore is the interface for full-featured FileStores
-type FileStore interface {
-	LimitedFileStore
-
-	RemoveDir(directoryName string) error
-	GetPath(fileName string) (string, error)
-	ListDir(directoryName string) []string
-	BaseDir() string
-}
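With LimitedFileStore renamed to Storage and the full-featured FileStore interface dropped, the disk-backed and in-memory stores are both used through the one remaining interface (see the []Storage{s, s2} loop in the TestFileStoreConsistency hunk earlier). A compile-time sketch of that relationship follows; it is an assumption based on the hunks above, not code from the commit.

package trustmanager

// Hypothetical compile-time assertions: both implementations shown in the
// hunks above are expected to satisfy the renamed Storage interface.
var (
	_ Storage = (*SimpleFileStore)(nil)
	_ Storage = (*MemoryFileStore)(nil)
)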
@@ -15,7 +15,7 @@ type X509FileStore struct {
 	fileMap        map[CertID]string
 	fingerprintMap map[CertID]*x509.Certificate
 	nameMap        map[string][]CertID
-	fileStore      FileStore
+	fileStore      Storage
 }
 
 // NewX509FileStore returns a new X509FileStore.
@@ -88,11 +88,7 @@ func (s *X509FileStore) addNamedCert(cert *x509.Certificate) error {
 	certBytes := CertToPEM(cert)
 
 	// Save the file to disk if not already there.
-	filePath, err := s.fileStore.GetPath(fileName)
-	if err != nil {
-		return err
-	}
-	if _, err := os.Stat(filePath); os.IsNotExist(err) {
+	if _, err = s.fileStore.Get(fileName); os.IsNotExist(err) {
 		if err := s.fileStore.Add(fileName, certBytes); err != nil {
 			return err
 		}
@@ -236,20 +236,19 @@ func ParsePEMPrivateKey(pemBytes []byte, passphrase string) (data.PrivateKey, er
 	return nil, errors.New("no valid private key found")
 }
 
+	var privKeyBytes []byte
+	var err error
+	if x509.IsEncryptedPEMBlock(block) {
+		privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase))
+		if err != nil {
+			return nil, errors.New("could not decrypt private key")
+		}
+	} else {
+		privKeyBytes = block.Bytes
+	}
+
 	switch block.Type {
 	case "RSA PRIVATE KEY":
-		var privKeyBytes []byte
-		var err error
-
-		if x509.IsEncryptedPEMBlock(block) {
-			privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase))
-			if err != nil {
-				return nil, errors.New("could not decrypt private key")
-			}
-		} else {
-			privKeyBytes = block.Bytes
-		}
-
 		rsaPrivKey, err := x509.ParsePKCS1PrivateKey(privKeyBytes)
 		if err != nil {
 			return nil, fmt.Errorf("could not parse DER encoded key: %v", err)
@@ -262,18 +261,6 @@ func ParsePEMPrivateKey(pemBytes []byte, passphrase string) (data.PrivateKey, er
 
 		return tufRSAPrivateKey, nil
 	case "EC PRIVATE KEY":
-		var privKeyBytes []byte
-		var err error
-
-		if x509.IsEncryptedPEMBlock(block) {
-			privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase))
-			if err != nil {
-				return nil, errors.New("could not decrypt private key")
-			}
-		} else {
-			privKeyBytes = block.Bytes
-		}
-
 		ecdsaPrivKey, err := x509.ParseECPrivateKey(privKeyBytes)
 		if err != nil {
 			return nil, fmt.Errorf("could not parse DER encoded private key: %v", err)
@@ -289,18 +276,6 @@ func ParsePEMPrivateKey(pemBytes []byte, passphrase string) (data.PrivateKey, er
 		// We serialize ED25519 keys by concatenating the private key
 		// to the public key and encoding with PEM. See the
 		// ED25519ToPrivateKey function.
-		var privKeyBytes []byte
-		var err error
-
-		if x509.IsEncryptedPEMBlock(block) {
-			privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase))
-			if err != nil {
-				return nil, errors.New("could not decrypt private key")
-			}
-		} else {
-			privKeyBytes = block.Bytes
-		}
-
 		tufECDSAPrivateKey, err := ED25519ToPrivateKey(privKeyBytes)
 		if err != nil {
 			return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err)
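The three hunks above hoist an identical decrypt-or-passthrough block out of each case of the switch in ParsePEMPrivateKey. The same step could instead be factored into a small helper; the sketch below is not code from the commit, the function name and placement are hypothetical, and it only reuses the crypto/x509 calls already visible in the diff.

package trustmanager // assumed placement, alongside ParsePEMPrivateKey

import (
	"crypto/x509"
	"encoding/pem"
	"errors"
)

// decryptPEMBlock is a hypothetical helper mirroring the hoisted block:
// decrypt the PEM block when it is encrypted, otherwise return its raw bytes.
func decryptPEMBlock(block *pem.Block, passphrase string) ([]byte, error) {
	if x509.IsEncryptedPEMBlock(block) {
		keyBytes, err := x509.DecryptPEMBlock(block, []byte(passphrase))
		if err != nil {
			return nil, errors.New("could not decrypt private key")
		}
		return keyBytes, nil
	}
	return block.Bytes, nil
}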
@@ -1,54 +0,0 @@
-package testutils
-
-import (
-	"database/sql"
-	"fmt"
-	"os"
-
-	"github.com/docker/notary/tuf/data"
-	// need to initialize sqlite for tests
-	_ "github.com/mattn/go-sqlite3"
-)
-
-var counter = 1
-
-// SampleMeta returns a static, fake (and invalid) FileMeta object
-func SampleMeta() data.FileMeta {
-	meta := data.FileMeta{
-		Length: 1,
-		Hashes: data.Hashes{
-			"sha256": []byte{0x01, 0x02},
-			"sha512": []byte{0x03, 0x04},
-		},
-	}
-	return meta
-}
-
-// GetSqliteDB creates and initializes a sqlite db
-func GetSqliteDB() *sql.DB {
-	os.Mkdir("/tmp/sqlite", 0755)
-	conn, err := sql.Open("sqlite3", fmt.Sprintf("/tmp/sqlite/file%d.db", counter))
-	if err != nil {
-		panic("can't connect to db")
-	}
-	counter++
-	tx, err := conn.Begin()
-	if err != nil {
-		panic("can't begin db transaction")
-	}
-	tx.Exec("CREATE TABLE keys (id int auto_increment, namespace varchar(255) not null, role varchar(255) not null, key text not null, primary key (id));")
-	tx.Exec("CREATE TABLE filehashes(namespace varchar(255) not null, path varchar(255) not null, alg varchar(10) not null, hash varchar(128) not null, primary key (namespace, path, alg));")
-	tx.Exec("CREATE TABLE filemeta(namespace varchar(255) not null, path varchar(255) not null, size int not null, custom text default null, primary key (namespace, path));")
-	tx.Commit()
-	return conn
-}
-
-// FlushDB deletes a sqliteDB
-func FlushDB(db *sql.DB) {
-	tx, _ := db.Begin()
-	tx.Exec("DELETE FROM `filemeta`")
-	tx.Exec("DELETE FROM `filehashes`")
-	tx.Exec("DELETE FROM `keys`")
-	tx.Commit()
-	os.RemoveAll("/tmp/tuf")
-}