mirror of https://github.com/docker/docs.git

update go-tuf dependency

This commit is contained in:
parent b2e089c6ee
commit cdc0c59cbb
@@ -16,7 +16,7 @@
 		},
 		{
 			"ImportPath": "github.com/endophage/go-tuf",
-			"Rev": "a06029e9b42bff41f0277e5ceb482ad00299210a"
+			"Rev": "913d6f239a809f317bf3642019bc480d18e80cfc"
 		},
 		{
 			"ImportPath": "github.com/gorilla/context",
@@ -11,11 +11,11 @@ import (
 	"time"
 
 	"github.com/flynn/go-tuf"
+	. "github.com/flynn/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
 	"github.com/flynn/go-tuf/data"
 	"github.com/flynn/go-tuf/keys"
 	"github.com/flynn/go-tuf/signed"
 	"github.com/flynn/go-tuf/util"
-	. "gopkg.in/check.v1"
 )
 
 // Hook up gocheck into the "go test" runner.
@@ -12,11 +12,11 @@ import (
 	"path/filepath"
 	"strings"
 
-	"github.com/agl/ed25519"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/agl/ed25519"
+	. "github.com/flynn/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
 	"github.com/flynn/go-tuf/data"
 	"github.com/flynn/go-tuf/util"
-	. "gopkg.in/check.v1"
 )
 
 type InteropSuite struct{}
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"time"
 
-	"github.com/boltdb/bolt"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/boltdb/bolt"
 )
 
 func MemoryLocalStore() LocalStore {
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"path/filepath"
 
-	. "gopkg.in/check.v1"
+	. "github.com/flynn/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
 )
 
 type LocalStoreSuite struct{}
Godeps/_workspace/src/github.com/endophage/go-tuf/client/testdata/generate/generate.py (generated, vendored): 0 lines changed, Normal file → Executable file
Godeps/_workspace/src/github.com/endophage/go-tuf/client/testdata/generate/generate.sh (generated, vendored): 0 lines changed, Normal file → Executable file
@@ -5,9 +5,9 @@ import (
 	"log"
 
 	"github.com/endophage/go-tuf"
+	"github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	"github.com/endophage/go-tuf/store"
 	"github.com/endophage/go-tuf/util"
-	"github.com/flynn/go-docopt"
 )
 
 func main() {
@@ -7,8 +7,8 @@ import (
 	"os"
 
 	"github.com/endophage/go-tuf"
+	"github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	"github.com/endophage/go-tuf/util"
-	"github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -5,7 +5,7 @@ import (
 	"io/ioutil"
 	"os"
 
-	"github.com/flynn/go-docopt"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	tuf "github.com/flynn/go-tuf/client"
 	"github.com/flynn/go-tuf/util"
 )
@@ -5,7 +5,7 @@ import (
 	"io"
 	"os"
 
-	"github.com/flynn/go-docopt"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	tuf "github.com/flynn/go-tuf/client"
 	"github.com/flynn/go-tuf/data"
 )
@@ -5,8 +5,8 @@ import (
 	"os"
 	"text/tabwriter"
 
-	"github.com/dustin/go-humanize"
-	"github.com/flynn/go-docopt"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/dustin/go-humanize"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	tuf "github.com/flynn/go-tuf/client"
 )
 
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"log"
 
-	"github.com/flynn/go-docopt"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	tuf "github.com/flynn/go-tuf/client"
 )
 
@@ -1,10 +1,10 @@
 package main
 
 import (
-	"encoding/json"
+	// "encoding/json"
 
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -20,17 +20,17 @@ Options:
 }
 
 func cmdAdd(args *docopt.Args, repo *tuf.Repo) error {
-	var custom json.RawMessage
-	if c := args.String["--custom"]; c != "" {
-		custom = json.RawMessage(c)
-	}
+	// var custom json.RawMessage
+	// if c := args.String["--custom"]; c != "" {
+	// 	custom = json.RawMessage(c)
+	// }
 	paths := args.All["<path>"].([]string)
 	if arg := args.String["--expires"]; arg != "" {
 		expires, err := parseExpires(arg)
 		if err != nil {
 			return err
 		}
-		return repo.AddTargetsWithExpires(paths, custom, expires)
+		return repo.AddTargetsWithExpires(paths, nil, expires)
 	}
-	return repo.AddTargets(paths, custom)
+	return repo.AddTargets(paths, nil)
 }
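The hunk above comments out the CLI's --custom handling, so "tuf add" now always passes nil custom metadata. Library callers can still attach custom JSON themselves; the following is only a minimal sketch against the AddTargets signature shown in this diff (the package name "example" and helper name are hypothetical):

package example

import (
	"encoding/json"

	tuf "github.com/flynn/go-tuf"
)

// addWithCustom attaches caller-supplied custom JSON to a target, which the
// CLI no longer does after this change (it passes nil instead).
func addWithCustom(repo *tuf.Repo, path string, custom json.RawMessage) error {
	return repo.AddTargets([]string{path}, custom)
}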
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -3,8 +3,8 @@ package main
 import (
 	"fmt"
 
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -11,9 +11,11 @@ import (
 	"strings"
 	"time"
 
-	"github.com/docker/docker/pkg/term"
-	"github.com/flynn/go-docopt"
+	"github.com/endophage/go-tuf/signed"
+	"github.com/endophage/go-tuf/store"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/docker/docker/pkg/term"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf/util"
 )
 
@@ -107,7 +109,8 @@ func runCommand(name string, args []string, dir string, insecure bool) error {
 	if !insecure {
 		p = getPassphrase
 	}
-	repo, err := tuf.NewRepo(tuf.FileSystemStore(dir, p))
+	signer := signed.Ed25519{}
+	repo, err := tuf.NewRepo(signer, store.FileSystemStore(dir, p), "sha256")
 	if err != nil {
 		return err
 	}
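For reference, a minimal sketch of constructing a repository against the updated API shown in the hunks above. Package paths and signatures are taken from this diff (the tests use NewEd25519/NewSigner); "repo-dir" is a placeholder path, and details may differ in other versions of the fork:

package main

import (
	"log"

	tuf "github.com/endophage/go-tuf"
	"github.com/endophage/go-tuf/signed"
	"github.com/endophage/go-tuf/store"
)

func main() {
	// An Ed25519 trust service wrapped in a Signer, as the updated tests do.
	trust := signed.NewEd25519()
	signer := signed.NewSigner(trust)

	// NewRepo now takes the signer, a store, and the hash algorithms.
	repo, err := tuf.NewRepo(signer, store.FileSystemStore("repo-dir", nil), "sha256")
	if err != nil {
		log.Fatal(err)
	}
	if err := repo.Init(false); err != nil {
		log.Fatal(err)
	}
}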
@@ -3,8 +3,8 @@ package main
 import (
 	"log"
 
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -3,8 +3,8 @@ package main
 import (
 	"errors"
 
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -4,8 +4,8 @@ import (
 	"encoding/json"
 	"os"
 
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -1,8 +1,8 @@
 package main
 
 import (
-	"github.com/flynn/go-docopt"
 	"github.com/flynn/go-tuf"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/github.com/flynn/go-docopt"
 )
 
 func init() {
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"testing"
 
-	. "gopkg.in/check.v1"
+	. "github.com/endophage/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
 )
 
 // Hook up gocheck into the "go test" runner.
@@ -6,20 +6,14 @@ import (
 	"encoding/json"
 	"time"
 
-	"github.com/tent/canonical-json-go"
+	cjson "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/tent/canonical-json-go"
 )
 
 const KeyIDLength = sha256.Size * 2
 
-type Signed struct {
-	Signed     json.RawMessage `json:"signed"`
-	Signatures []Signature     `json:"signatures"`
-}
-
-type Signature struct {
-	KeyID     string   `json:"keyid"`
-	Method    string   `json:"method"`
-	Signature HexBytes `json:"sig"`
+type KeyValue struct {
+	Public HexBytes `json:"public"`
+	// Private HexBytes `json:"private,omitempty"`
 }
 
 type Key struct {
@@ -37,9 +31,15 @@ func (k *Key) ID() string {
 	return hex.EncodeToString(digest[:])
 }
 
-type KeyValue struct {
-	Public HexBytes `json:"public"`
-	//Private HexBytes `json:"private,omitempty"`
+type Signed struct {
+	Signed     json.RawMessage `json:"signed"`
+	Signatures []Signature     `json:"signatures"`
+}
+
+type Signature struct {
+	KeyID     string   `json:"keyid"`
+	Method    string   `json:"method"`
+	Signature HexBytes `json:"sig"`
 }
 
 func DefaultExpires(role string) time.Time {
@@ -82,6 +82,15 @@ type Role struct {
 	Threshold int `json:"threshold"`
 }
 
+func (r *Role) ValidKey(id string) bool {
+	for _, key := range r.KeyIDs {
+		if key == id {
+			return true
+		}
+	}
+	return false
+}
+
 type Files map[string]FileMeta
 
 type Snapshot struct {
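The hunks above add a ValidKey helper to data.Role, which now holds its key IDs as a plain slice. A minimal sketch of using it (the sample IDs are placeholders, and the field shapes are taken from this diff):

package main

import (
	"fmt"

	"github.com/endophage/go-tuf/data"
)

func main() {
	role := &data.Role{
		KeyIDs:    []string{"abc123", "def456"}, // placeholder IDs
		Threshold: 1,
	}
	// ValidKey reports whether a key ID is listed for the role, replacing
	// the map lookup that the old keys.Role type used.
	fmt.Println(role.ValidKey("abc123")) // true
	fmt.Println(role.ValidKey("zzz"))    // false
}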
@@ -12,8 +12,8 @@ import (
 	"fmt"
 	"io"
 
-	"golang.org/x/crypto/nacl/secretbox"
-	"golang.org/x/crypto/scrypt"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/golang.org/x/crypto/nacl/secretbox"
+	"github.com/flynn/go-tuf/Godeps/_workspace/src/golang.org/x/crypto/scrypt"
 )
 
 const saltSize = 32
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"testing"
 
-	. "gopkg.in/check.v1"
+	. "github.com/flynn/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
 )
 
 // Hook up gocheck into the "go test" runner.
@@ -1,4 +1,4 @@
-package tuf
+package errors
 
 import (
 	"errors"
@@ -1,10 +1,8 @@
 package keys
 
 import (
-	"crypto/rand"
 	"errors"
 
-	"github.com/agl/ed25519"
 	"github.com/endophage/go-tuf/data"
 )
 
@@ -18,79 +16,59 @@ var (
 	ErrInvalidThreshold = errors.New("tuf: invalid role threshold")
 )
 
-func NewKey() (*Key, error) {
-	pub, priv, err := ed25519.GenerateKey(rand.Reader)
-	if err != nil {
-		return nil, err
-	}
-	k := &Key{
-		Public:  *pub,
-		Private: priv,
-	}
-	k.ID = k.Serialize().ID()
-	return k, nil
-}
-
-type Key struct {
-	ID     string
-	Public [ed25519.PublicKeySize]byte
-	//Private *[ed25519.PrivateKeySize]byte
-}
-
-func (k *Key) Serialize() *data.Key {
-	return &data.Key{
-		Type:  "ed25519",
-		Value: data.KeyValue{Public: k.Public[:]},
-	}
-}
-
-func (k *Key) SerializePrivate() *data.Key {
-	return &data.Key{
-		Type: "ed25519",
-		Value: data.KeyValue{
-			Public:  k.Public[:],
-			Private: k.Private[:],
-		},
-	}
-}
-
-type Role struct {
-	KeyIDs    map[string]struct{}
-	Threshold int
-}
-
-func (r *Role) ValidKey(id string) bool {
-	_, ok := r.KeyIDs[id]
-	return ok
-}
+type PublicKey struct {
+	data.Key
+	ID string
+}
+
+func NewPublicKey(keyType string, public []byte) *PublicKey {
+	// create a copy so the private key is not included
+	key := data.Key{
+		Type:  keyType,
+		Value: data.KeyValue{Public: public},
+	}
+	return &PublicKey{key, key.ID()}
+}
+
+type PrivateKey struct {
+	PublicKey
+	Private []byte
+}
 
 type DB struct {
-	roles map[string]*Role
-	keys  map[string]*Key
+	types map[string]int
+	roles map[string]*data.Role
+	keys  map[string]*PublicKey
 }
 
 func NewDB() *DB {
 	return &DB{
-		roles: make(map[string]*Role),
-		keys:  make(map[string]*Key),
+		roles: make(map[string]*data.Role),
+		keys:  make(map[string]*PublicKey),
 	}
 }
 
-func (db *DB) AddKey(id string, k *data.Key) error {
-	if k.Type != "ed25519" {
-		return ErrWrongType
-	}
-	if id != k.ID() {
-		return ErrWrongID
-	}
-	if len(k.Value.Public) != ed25519.PublicKeySize {
-		return ErrInvalidKey
-	}
-
-	var key Key
-	copy(key.Public[:], k.Value.Public)
-	key.ID = id
-	db.keys[id] = &key
+func (db *DB) AddKey(k *PublicKey) error {
+	//if _, ok := db.types[k.Type]; !ok {
+	//	return ErrWrongType
+	//}
+	//if len(k.Value.Public) != ed25519.PublicKeySize {
+	//	return ErrInvalidKey
+	//}
+
+	key := PublicKey{
+		Key: data.Key{
+			Type: k.Type,
+			Value: data.KeyValue{
+				Public: make([]byte, len(k.Value.Public)),
+			},
		},
+		ID: k.ID,
+	}
+
+	copy(key.Value.Public, k.Value.Public)
+	db.keys[k.ID] = &key
 	return nil
 }
 
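A short sketch of the key types introduced above: NewPublicKey copies only the public half into a data.Key and pairs it with its computed ID, and the key DB now stores *PublicKey values. The placeholder bytes are not a real ed25519 key; names and signatures come from this diff:

package main

import (
	"fmt"

	"github.com/endophage/go-tuf/keys"
)

func main() {
	// NewPublicKey builds a data.Key holding only public material.
	pub := keys.NewPublicKey("ed25519", []byte("not-a-real-key")) // placeholder bytes
	fmt.Println(pub.ID == pub.Key.ID())                           // stored ID matches data.Key.ID()

	db := keys.NewDB()
	if err := db.AddKey(pub); err != nil { // AddKey now takes a *PublicKey
		fmt.Println("add key:", err)
	}
}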
@@ -114,25 +92,21 @@ func (db *DB) AddRole(name string, r *data.Role) error {
 		return ErrInvalidThreshold
 	}
 
-	role := &Role{
-		KeyIDs:    make(map[string]struct{}),
-		Threshold: r.Threshold,
-	}
+	// validate all key ids have the correct length
 	for _, id := range r.KeyIDs {
 		if len(id) != data.KeyIDLength {
 			return ErrInvalidKeyID
 		}
-		role.KeyIDs[id] = struct{}{}
 	}
 
-	db.roles[name] = role
+	db.roles[name] = r
 	return nil
 }
 
-func (db *DB) GetKey(id string) *Key {
+func (db *DB) GetKey(id string) *PublicKey {
 	return db.keys[id]
 }
 
-func (db *DB) GetRole(name string) *Role {
+func (db *DB) GetRole(name string) *data.Role {
 	return db.roles[name]
 }
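Roles are now stored in the DB as *data.Role directly. A minimal sketch of adding and querying a role against the AddRole/GetRole signatures shown above (key IDs must be data.KeyIDLength hex characters; the repeated "ab" string is just a placeholder of the right length):

package main

import (
	"fmt"
	"strings"

	"github.com/endophage/go-tuf/data"
	"github.com/endophage/go-tuf/keys"
)

func main() {
	db := keys.NewDB()

	id := strings.Repeat("ab", 32) // placeholder 64-character key ID
	role := &data.Role{KeyIDs: []string{id}, Threshold: 1}
	if err := db.AddRole("targets", role); err != nil {
		fmt.Println("add role:", err)
		return
	}
	// GetRole returns the *data.Role itself, so ValidKey is available here.
	fmt.Println(db.GetRole("targets").ValidKey(id)) // true
}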
@@ -10,6 +10,7 @@ import (
 	"time"
 
 	"github.com/endophage/go-tuf/data"
+	"github.com/endophage/go-tuf/errors"
 	"github.com/endophage/go-tuf/keys"
 	"github.com/endophage/go-tuf/signed"
 	"github.com/endophage/go-tuf/store"
@@ -37,13 +38,14 @@ var snapshotManifests = []string{
 }
 
 type Repo struct {
+	trust          signed.Signer
 	local          store.LocalStore
 	hashAlgorithms []string
 	meta           map[string]json.RawMessage
 }
 
-func NewRepo(local store.LocalStore, hashAlgorithms ...string) (*Repo, error) {
-	r := &Repo{local: local, hashAlgorithms: hashAlgorithms}
+func NewRepo(trust *signed.Signer, local store.LocalStore, hashAlgorithms ...string) (*Repo, error) {
+	r := &Repo{trust: *trust, local: local, hashAlgorithms: hashAlgorithms}
 
 	var err error
 	r.meta, err = local.GetMeta()
@@ -59,7 +61,7 @@ func (r *Repo) Init(consistentSnapshot bool) error {
 		return err
 	}
 	if len(t.Targets) > 0 {
-		return ErrInitNotAllowed
+		return errors.ErrInitNotAllowed
 	}
 	root := data.NewRoot()
 	root.ConsistentSnapshot = consistentSnapshot
@@ -72,8 +74,8 @@ func (r *Repo) db() (*keys.DB, error) {
 	if err != nil {
 		return nil, err
 	}
-	for id, k := range root.Keys {
-		if err := db.AddKey(id, k); err != nil {
+	for _, k := range root.Keys {
+		if err := db.AddKey(&keys.PublicKey{*k, k.ID()}); err != nil {
 			return nil, err
 		}
 	}
@@ -94,7 +96,7 @@ func (r *Repo) root() (*data.Root, error) {
 	if err := json.Unmarshal(rootJSON, s); err != nil {
 		return nil, err
 	}
-	root := &data.Root{}
+	root := data.NewRoot()
 	if err := json.Unmarshal(s.Signed, root); err != nil {
 		return nil, err
 	}
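The Repo.db() hunk above wraps every stored *data.Key into a *keys.PublicKey before adding it to the key DB. A small sketch of that conversion in isolation (placeholder key bytes; the composite-literal form mirrors the diff):

package main

import (
	"fmt"

	"github.com/endophage/go-tuf/data"
	"github.com/endophage/go-tuf/keys"
)

func main() {
	db := keys.NewDB()
	k := &data.Key{Type: "ed25519", Value: data.KeyValue{Public: []byte("placeholder")}}

	// Wrap the data.Key together with its precomputed ID, as Repo.db() does.
	if err := db.AddKey(&keys.PublicKey{*k, k.ID()}); err != nil {
		fmt.Println("add key:", err)
		return
	}
	fmt.Println(db.GetKey(k.ID()) != nil) // true
}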
@@ -155,11 +157,11 @@ func (r *Repo) GenKey(role string) (string, error) {
 
 func (r *Repo) GenKeyWithExpires(keyRole string, expires time.Time) (string, error) {
 	if !keys.ValidRole(keyRole) {
-		return "", ErrInvalidRole{keyRole}
+		return "", errors.ErrInvalidRole{keyRole}
 	}
 
 	if !validExpires(expires) {
-		return "", ErrInvalidExpires{expires}
+		return "", errors.ErrInvalidExpires{expires}
 	}
 
 	root, err := r.root()
@@ -167,11 +169,11 @@ func (r *Repo) GenKeyWithExpires(keyRole string, expires time.Time) (string, err
 		return "", err
 	}
 
-	key, err := keys.NewKey()
+	key, err := r.trust.NewKey()
 	if err != nil {
 		return "", err
 	}
-	if err := r.local.SaveKey(keyRole, key.SerializePrivate()); err != nil {
+	if err := r.local.SaveKey(keyRole, &key.Key); err != nil {
 		return "", err
 	}
 
@@ -182,7 +184,7 @@ func (r *Repo) GenKeyWithExpires(keyRole string, expires time.Time) (string, err
 	}
 	role.KeyIDs = append(role.KeyIDs, key.ID)
 
-	root.Keys[key.ID] = key.Serialize()
+	root.Keys[key.ID] = &key.Key
 	root.Expires = expires.Round(time.Second)
 	root.Version++
 
@@ -219,11 +221,11 @@ func (r *Repo) RevokeKey(role, id string) error {
 
 func (r *Repo) RevokeKeyWithExpires(keyRole, id string, expires time.Time) error {
 	if !keys.ValidRole(keyRole) {
-		return ErrInvalidRole{keyRole}
+		return errors.ErrInvalidRole{keyRole}
 	}
 
 	if !validExpires(expires) {
-		return ErrInvalidExpires{expires}
+		return errors.ErrInvalidExpires{expires}
 	}
 
 	root, err := r.root()
@@ -232,12 +234,12 @@ func (r *Repo) RevokeKeyWithExpires(keyRole, id string, expires time.Time) error
 	}
 
 	if _, ok := root.Keys[id]; !ok {
-		return ErrKeyNotFound{keyRole, id}
+		return errors.ErrKeyNotFound{keyRole, id}
 	}
 
 	role, ok := root.Roles[keyRole]
 	if !ok {
-		return ErrKeyNotFound{keyRole, id}
+		return errors.ErrKeyNotFound{keyRole, id}
 	}
 
 	keyIDs := make([]string, 0, len(role.KeyIDs))
@@ -248,7 +250,7 @@ func (r *Repo) RevokeKeyWithExpires(keyRole, id string, expires time.Time) error
 		keyIDs = append(keyIDs, keyID)
 	}
 	if len(keyIDs) == len(role.KeyIDs) {
-		return ErrKeyNotFound{keyRole, id}
+		return errors.ErrKeyNotFound{keyRole, id}
 	}
 	role.KeyIDs = keyIDs
 
@@ -265,7 +267,7 @@ func (r *Repo) setMeta(name string, meta interface{}) error {
 	if err != nil {
 		return err
 	}
-	s, err := signed.Marshal(meta, keys...)
+	s, err := r.trust.Marshal(meta, keys...)
 	if err != nil {
 		return err
 	}
@@ -280,7 +282,7 @@ func (r *Repo) setMeta(name string, meta interface{}) error {
 func (r *Repo) Sign(name string) error {
 	role := strings.TrimSuffix(name, ".json")
 	if !keys.ValidRole(role) {
-		return ErrInvalidRole{role}
+		return errors.ErrInvalidRole{role}
 	}
 
 	s, err := r.signedMeta(name)
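The repository's error values now live in a dedicated errors package (the old top-level types are referenced as errors.* above, and as tuferr.* in the tests below). A minimal sketch of handling them by type, assuming these error structs keep value-receiver Error() methods as in upstream go-tuf:

package main

import (
	"fmt"

	tuferr "github.com/endophage/go-tuf/errors"
)

// classify switches on the relocated error types.
func classify(err error) string {
	switch err.(type) {
	case tuferr.ErrInvalidRole:
		return "unknown role"
	case tuferr.ErrMissingMetadata:
		return "metadata file missing"
	default:
		return fmt.Sprintf("other error: %v", err)
	}
}

func main() {
	fmt.Println(classify(tuferr.ErrInvalidRole{"foo"}))
}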
@@ -293,12 +295,11 @@ func (r *Repo) Sign(name string) error {
 		return err
 	}
 	if len(keys) == 0 {
-		return ErrInsufficientKeys{name}
-	}
-	for _, k := range keys {
-		signed.Sign(s, k)
+		return errors.ErrInsufficientKeys{name}
 	}
 
+	r.trust.Sign(s, keys...)
+
 	b, err := json.Marshal(s)
 	if err != nil {
 		return err
@@ -313,13 +314,17 @@ func (r *Repo) Sign(name string) error {
 // been revoked are omitted), except for the root role in which case all local
 // keys are returned (revoked root keys still need to sign new root metadata so
 // clients can verify the new root.json and update their keys db accordingly).
-func (r *Repo) getKeys(name string) ([]*data.Key, error) {
+func (r *Repo) getKeys(name string) ([]*keys.PublicKey, error) {
 	localKeys, err := r.local.GetKeys(name)
 	if err != nil {
 		return nil, err
 	}
 	if name == "root" {
-		return localKeys, nil
+		rootkeys := make([]*keys.PublicKey, 0, len(localKeys))
+		for _, key := range localKeys {
+			rootkeys = append(rootkeys, &keys.PublicKey{*key, key.ID()})
+		}
+		return rootkeys, nil
 	}
 	db, err := r.db()
 	if err != nil {
@@ -332,19 +337,19 @@ func (r *Repo) getKeys(name string) ([]*data.Key, error) {
 	if len(role.KeyIDs) == 0 {
 		return nil, nil
 	}
-	keys := make([]*data.Key, 0, len(role.KeyIDs))
+	rolekeys := make([]*keys.PublicKey, 0, len(role.KeyIDs))
 	for _, key := range localKeys {
-		if _, ok := role.KeyIDs[key.ID()]; ok {
-			keys = append(keys, key)
+		if role.ValidKey(key.ID()) {
+			rolekeys = append(rolekeys, &keys.PublicKey{*key, key.ID()})
 		}
 	}
-	return keys, nil
+	return rolekeys, nil
 }
 
 func (r *Repo) signedMeta(name string) (*data.Signed, error) {
 	b, ok := r.meta[name]
 	if !ok {
-		return nil, ErrMissingMetadata{name}
+		return nil, errors.ErrMissingMetadata{name}
 	}
 	s := &data.Signed{}
 	if err := json.Unmarshal(b, s); err != nil {
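Signing is now delegated to the trust service in a single call (r.trust.Sign(s, keys...)) instead of looping over keys with signed.Sign. A sketch of the caller-side flow, with names and signatures taken from this diff and "repo-dir" as a placeholder path:

package main

import (
	"log"

	tuf "github.com/endophage/go-tuf"
	"github.com/endophage/go-tuf/signed"
	"github.com/endophage/go-tuf/store"
)

func main() {
	signer := signed.NewSigner(signed.NewEd25519())
	repo, err := tuf.NewRepo(signer, store.FileSystemStore("repo-dir", nil), "sha256")
	if err != nil {
		log.Fatal(err)
	}
	// GenKey registers a root key with the trust service; Sign then signs
	// root.json with every valid root key in one call to the Signer.
	if _, err := repo.GenKey("root"); err != nil {
		log.Fatal(err)
	}
	if err := repo.Sign("root.json"); err != nil {
		log.Fatal(err)
	}
}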
@@ -376,7 +381,7 @@ func (r *Repo) AddTargetWithExpires(path string, custom json.RawMessage, expires
 
 func (r *Repo) AddTargetsWithExpires(paths []string, custom json.RawMessage, expires time.Time) error {
 	if !validExpires(expires) {
-		return ErrInvalidExpires{expires}
+		return errors.ErrInvalidExpires{expires}
 	}
 
 	t, err := r.targets()
@@ -388,7 +393,7 @@ func (r *Repo) AddTargetsWithExpires(paths []string, custom json.RawMessage, exp
 		normalizedPaths[i] = util.NormalizeTarget(path)
 	}
 	if err := r.local.WalkStagedTargets(normalizedPaths, func(path string, meta data.FileMeta) (err error) {
-		t.Targets[path] = meta
+		t.Targets[util.NormalizeTarget(path)] = meta
 		return nil
 	}); err != nil {
 		return err
@@ -413,7 +418,7 @@ func (r *Repo) RemoveTargetWithExpires(path string, expires time.Time) error {
 // If paths is empty, all targets will be removed.
 func (r *Repo) RemoveTargetsWithExpires(paths []string, expires time.Time) error {
 	if !validExpires(expires) {
-		return ErrInvalidExpires{expires}
+		return errors.ErrInvalidExpires{expires}
 	}
 
 	t, err := r.targets()
@@ -447,7 +452,7 @@ func (r *Repo) Snapshot(t CompressionType) error {
 
 func (r *Repo) SnapshotWithExpires(t CompressionType, expires time.Time) error {
 	if !validExpires(expires) {
-		return ErrInvalidExpires{expires}
+		return errors.ErrInvalidExpires{expires}
 	}
 
 	snapshot, err := r.snapshot()
@@ -480,7 +485,7 @@ func (r *Repo) Timestamp() error {
 
 func (r *Repo) TimestampWithExpires(expires time.Time) error {
 	if !validExpires(expires) {
-		return ErrInvalidExpires{expires}
+		return errors.ErrInvalidExpires{expires}
 	}
 
 	db, err := r.db()
@@ -535,7 +540,7 @@ func (r *Repo) Commit() error {
 	// check we have all the metadata
 	for _, name := range topLevelManifests {
 		if _, ok := r.meta[name]; !ok {
-			return ErrMissingMetadata{name}
+			return errors.ErrMissingMetadata{name}
 		}
 	}
 
@@ -546,7 +551,7 @@ func (r *Repo) Commit() error {
 	}
 	for name, role := range root.Roles {
 		if len(role.KeyIDs) < role.Threshold {
-			return ErrNotEnoughKeys{name, len(role.KeyIDs), role.Threshold}
+			return errors.ErrNotEnoughKeys{name, len(role.KeyIDs), role.Threshold}
 		}
 	}
 
@@ -611,7 +616,7 @@ func (r *Repo) verifySignature(name string, db *keys.DB) error {
 	}
 	role := strings.TrimSuffix(name, ".json")
 	if err := signed.Verify(s, role, 0, db); err != nil {
-		return ErrInsufficientSignatures{name, err}
+		return errors.ErrInsufficientSignatures{name, err}
 	}
 	return nil
 }
@@ -619,7 +624,7 @@ func (r *Repo) fileMeta(name string) (data.FileMeta, error) {
 func (r *Repo) fileMeta(name string) (data.FileMeta, error) {
 	b, ok := r.meta[name]
 	if !ok {
-		return data.FileMeta{}, ErrMissingMetadata{name}
+		return data.FileMeta{}, errors.ErrMissingMetadata{name}
 	}
 	return util.GenerateFileMeta(bytes.NewReader(b), r.hashAlgorithms...)
 }
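One hunk above also fixes the staged-target walk so entries are stored under the normalized path (t.Targets[util.NormalizeTarget(path)]). A tiny sketch of that normalization; the exact output is not asserted here, but the tests in this diff expect "foo.txt" to end up keyed as "/foo.txt":

package main

import (
	"fmt"

	"github.com/endophage/go-tuf/util"
)

func main() {
	// Lookups and additions now agree on the normalized form of the path.
	fmt.Println(util.NormalizeTarget("foo.txt"))
}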
@@ -10,12 +10,12 @@ import (
 	"testing"
 	"time"
 
-	"github.com/agl/ed25519"
+	"github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/agl/ed25519"
+	. "github.com/endophage/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
 	"github.com/endophage/go-tuf/data"
 	"github.com/endophage/go-tuf/store"
-	. "gopkg.in/check.v1"
 	// "github.com/endophage/go-tuf/encrypted"
-	"github.com/endophage/go-tuf/keys"
+	tuferr "github.com/endophage/go-tuf/errors"
 	"github.com/endophage/go-tuf/signed"
 	"github.com/endophage/go-tuf/util"
 )
@@ -28,6 +28,9 @@ type RepoSuite struct{}
 var _ = Suite(&RepoSuite{})
 
 func (RepoSuite) TestNewRepo(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+
 	meta := map[string]json.RawMessage{
 		"root.json": []byte(`{
 		"signed": {
@@ -75,7 +78,7 @@ func (RepoSuite) TestNewRepo(c *C) {
 		local.SetMeta(k, v)
 	}
 
-	r, err := NewRepo(local)
+	r, err := NewRepo(signer, local, "sha256")
 	c.Assert(err, IsNil)
 
 	root, err := r.root()
@@ -108,6 +111,9 @@ func (RepoSuite) TestNewRepo(c *C) {
 }
 
 func (RepoSuite) TestInit(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+
 	db := util.GetSqliteDB()
 	defer util.FlushDB(db)
 	local := store.DBStore(
@@ -117,7 +123,7 @@ func (RepoSuite) TestInit(c *C) {
 	)
 	local.AddBlob("/foo.txt", util.SampleMeta())
 
-	r, err := NewRepo(local)
+	r, err := NewRepo(signer, local, "sha256")
 	c.Assert(err, IsNil)
 
 	// Init() sets root.ConsistentSnapshot
@@ -130,7 +136,7 @@ func (RepoSuite) TestInit(c *C) {
 
 	// Init() fails if targets have been added
 	c.Assert(r.AddTarget("foo.txt", nil), IsNil)
-	c.Assert(r.Init(true), Equals, ErrInitNotAllowed)
+	c.Assert(r.Init(true), Equals, tuferr.ErrInitNotAllowed)
 }
 
 func genKey(c *C, r *Repo, role string) string {
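Every test above and below now repeats the same three setup lines before NewRepo. A hypothetical helper (not part of this diff) that would collect that boilerplate next to repo_test.go, shown only as a sketch of the pattern:

package tuf

import (
	"github.com/endophage/go-tuf/signed"
	"github.com/endophage/go-tuf/store"
)

// newTestRepo bundles the setup each test now repeats: an Ed25519 trust
// service, a Signer, and a repo over the given store with sha256 hashing.
func newTestRepo(local store.LocalStore) (*Repo, error) {
	trust := signed.NewEd25519()
	signer := signed.NewSigner(trust)
	return NewRepo(signer, local, "sha256")
}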
@@ -140,15 +146,18 @@ func genKey(c *C, r *Repo, role string) string {
 }
 
 func (RepoSuite) TestGenKey(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+
 	sqldb := util.GetSqliteDB()
 	defer util.FlushDB(sqldb)
 	local := store.DBStore(sqldb, "")
-	r, err := NewRepo(local)
+	r, err := NewRepo(signer, local, "sha256")
 	c.Assert(err, IsNil)
 
 	// generate a key for an unknown role
 	_, err = r.GenKey("foo")
-	c.Assert(err, Equals, ErrInvalidRole{"foo"})
+	c.Assert(err, Equals, tuferr.ErrInvalidRole{"foo"})
 
 	// generate a root key
 	id := genKey(c, r, "root")
@@ -173,7 +182,7 @@ func (RepoSuite) TestGenKey(c *C) {
 	}
 	c.Assert(k.ID(), Equals, keyID)
 	c.Assert(k.Value.Public, HasLen, ed25519.PublicKeySize)
-	c.Assert(k.Value.Private, IsNil)
+	//c.Assert(k.Value.Private, IsNil)
 
 	// check root key + role are in db
 	db, err := r.db()
@@ -182,7 +191,7 @@ func (RepoSuite) TestGenKey(c *C) {
 	c.Assert(rootKey, NotNil)
 	c.Assert(rootKey.ID, Equals, keyID)
 	role := db.GetRole("root")
-	c.Assert(role.KeyIDs, DeepEquals, map[string]struct{}{keyID: {}})
+	c.Assert(role.KeyIDs, DeepEquals, []string{keyID})
 
 	// check the key was saved correctly
 	localKeys, err := local.GetKeys("root")
@@ -195,8 +204,8 @@ func (RepoSuite) TestGenKey(c *C) {
 	c.Assert(err, IsNil)
 	c.Assert(rootKeys, HasLen, 1)
 	c.Assert(rootKeys[0].ID(), Equals, rootKey.ID)
-	c.Assert(rootKeys[0].Value.Public, DeepEquals, rootKey.Serialize().Value.Public)
-	c.Assert(rootKeys[0].Value.Private, IsNil)
+	c.Assert(rootKeys[0].Value.Public, DeepEquals, rootKey.Key.Value.Public)
+	//c.Assert(rootKeys[0].Value.Private, IsNil)
 
 	// generate two targets keys
 	genKey(c, r, "targets")
@@ -212,11 +221,11 @@ func (RepoSuite) TestGenKey(c *C) {
 		c.Fatal("missing targets role")
 	}
 	c.Assert(targetsRole.KeyIDs, HasLen, 2)
-	targetKeyIDs := make(map[string]struct{}, 2)
+	targetKeyIDs := make([]string, 0, 2)
 	db, err = r.db()
 	c.Assert(err, IsNil)
 	for _, id := range targetsRole.KeyIDs {
-		targetKeyIDs[id] = struct{}{}
+		targetKeyIDs = append(targetKeyIDs, id)
 		_, ok = root.Keys[id]
 		if !ok {
 			c.Fatal("missing key")
@@ -269,17 +278,20 @@ func (RepoSuite) TestGenKey(c *C) {
 }
 
 func (RepoSuite) TestRevokeKey(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+
 	db := util.GetSqliteDB()
 	defer util.FlushDB(db)
 	local := store.DBStore(db, "")
-	r, err := NewRepo(local)
+	r, err := NewRepo(signer, local, "sha256")
 	c.Assert(err, IsNil)
 
 	// revoking a key for an unknown role returns ErrInvalidRole
-	c.Assert(r.RevokeKey("foo", ""), DeepEquals, ErrInvalidRole{"foo"})
+	c.Assert(r.RevokeKey("foo", ""), DeepEquals, tuferr.ErrInvalidRole{"foo"})
 
 	// revoking a key which doesn't exist returns ErrKeyNotFound
-	c.Assert(r.RevokeKey("root", "nonexistent"), DeepEquals, ErrKeyNotFound{"root", "nonexistent"})
+	c.Assert(r.RevokeKey("root", "nonexistent"), DeepEquals, tuferr.ErrKeyNotFound{"root", "nonexistent"})
 
 	// generate keys
 	genKey(c, r, "root")
@@ -319,16 +331,19 @@ func (RepoSuite) TestRevokeKey(c *C) {
 }
 
 func (RepoSuite) TestSign(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+
 	baseMeta := map[string]json.RawMessage{"root.json": []byte(`{"signed":{},"signatures":[]}`)}
 	db := util.GetSqliteDB()
 	defer util.FlushDB(db)
 	local := store.DBStore(db, "")
 	local.SetMeta("root.json", baseMeta["root.json"])
-	r, err := NewRepo(local)
+	r, err := NewRepo(signer, local, "sha256")
 	c.Assert(err, IsNil)
 
 	// signing with no keys returns ErrInsufficientKeys
-	c.Assert(r.Sign("root.json"), Equals, ErrInsufficientKeys{"root.json"})
+	c.Assert(r.Sign("root.json"), Equals, tuferr.ErrInsufficientKeys{"root.json"})
 
 	checkSigIDs := func(keyIDs ...string) {
 		meta, err := local.GetMeta()
@@ -341,8 +356,6 @@ func (RepoSuite) TestSign(c *C) {
 		}
 		s := &data.Signed{}
 		c.Assert(json.Unmarshal(rootJSON, s), IsNil)
-		fmt.Println("Len Signatures", len(s.Signatures))
-		fmt.Println("Len KeyIDs", len(keyIDs))
 		c.Assert(s.Signatures, HasLen, len(keyIDs))
 		for i, id := range keyIDs {
 			c.Assert(s.Signatures[i].KeyID, Equals, id)
@@ -350,53 +363,58 @@ func (RepoSuite) TestSign(c *C) {
 	}
 
 	// signing with an available key generates a signature
-	key, err := keys.NewKey()
+	//key, err := signer.NewKey()
+	kID, err := r.GenKey("root")
 	c.Assert(err, IsNil)
-	c.Assert(local.SaveKey("root", key.SerializePrivate()), IsNil)
+	//c.Assert(local.SaveKey("root", key.SerializePrivate()), IsNil)
 	c.Assert(r.Sign("root.json"), IsNil)
-	checkSigIDs(key.ID)
+	checkSigIDs(kID)
 
 	// signing again does not generate a duplicate signature
 	c.Assert(r.Sign("root.json"), IsNil)
-	checkSigIDs(key.ID)
+	checkSigIDs(kID)
 
 	// signing with a new available key generates another signature
-	newKey, err := keys.NewKey()
+	//newKey, err := signer.NewKey()
+	newkID, err := r.GenKey("root")
 	c.Assert(err, IsNil)
-	c.Assert(local.SaveKey("root", newKey.SerializePrivate()), IsNil)
+	//c.Assert(local.SaveKey("root", newKey.SerializePrivate()), IsNil)
 	c.Assert(r.Sign("root.json"), IsNil)
-	checkSigIDs(key.ID, newKey.ID)
+	checkSigIDs(kID, newkID)
 }
 
 func (RepoSuite) TestCommit(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+
 	//files := map[string][]byte{"/foo.txt": []byte("foo"), "/bar.txt": []byte("bar")}
 	db := util.GetSqliteDB()
 	defer util.FlushDB(db)
 	local := store.DBStore(db, "")
-	r, err := NewRepo(local)
+	r, err := NewRepo(signer, local, "sha256")
 	c.Assert(err, IsNil)
 
 	// commit without root.json
-	c.Assert(r.Commit(), DeepEquals, ErrMissingMetadata{"root.json"})
+	c.Assert(r.Commit(), DeepEquals, tuferr.ErrMissingMetadata{"root.json"})
 
 	// commit without targets.json
 	genKey(c, r, "root")
-	c.Assert(r.Commit(), DeepEquals, ErrMissingMetadata{"targets.json"})
+	c.Assert(r.Commit(), DeepEquals, tuferr.ErrMissingMetadata{"targets.json"})
 
 	// commit without snapshot.json
 	genKey(c, r, "targets")
 	local.AddBlob("/foo.txt", util.SampleMeta())
 	c.Assert(r.AddTarget("foo.txt", nil), IsNil)
-	c.Assert(r.Commit(), DeepEquals, ErrMissingMetadata{"snapshot.json"})
+	c.Assert(r.Commit(), DeepEquals, tuferr.ErrMissingMetadata{"snapshot.json"})
 
 	// commit without timestamp.json
 	genKey(c, r, "snapshot")
 	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
-	c.Assert(r.Commit(), DeepEquals, ErrMissingMetadata{"timestamp.json"})
+	c.Assert(r.Commit(), DeepEquals, tuferr.ErrMissingMetadata{"timestamp.json"})
 
 	// commit with timestamp.json but no timestamp key
 	c.Assert(r.Timestamp(), IsNil)
-	c.Assert(r.Commit(), DeepEquals, ErrInsufficientSignatures{"timestamp.json", signed.ErrNoSignatures})
+	c.Assert(r.Commit(), DeepEquals, tuferr.ErrInsufficientSignatures{"timestamp.json", signed.ErrNoSignatures})
 
 	// commit success
 	genKey(c, r, "timestamp")
@@ -436,7 +454,7 @@ func (RepoSuite) TestCommit(c *C) {
 	c.Assert(r.RevokeKey("timestamp", role.KeyIDs[0]), IsNil)
 	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
 	c.Assert(r.Timestamp(), IsNil)
-	c.Assert(r.Commit(), DeepEquals, ErrNotEnoughKeys{"timestamp", 0, 1})
+	c.Assert(r.Commit(), DeepEquals, tuferr.ErrNotEnoughKeys{"timestamp", 0, 1})
 }
 
 type tmpDir struct {
@@ -512,147 +530,154 @@ func (t *tmpDir) readFile(path string) []byte {
 	return data
 }
 
-//func (RepoSuite) TestCommitFileSystem(c *C) {
-// tmp := newTmpDir(c)
-// local := FileSystemStore(tmp.path, nil)
-// r, err := NewRepo(local)
-// c.Assert(err, IsNil)
-//
-// // don't use consistent snapshots to make the checks simpler
-// c.Assert(r.Init(false), IsNil)
-//
-// // generating keys should stage root.json and create repo dirs
-// genKey(c, r, "root")
-// genKey(c, r, "targets")
-// genKey(c, r, "snapshot")
-// genKey(c, r, "timestamp")
-// tmp.assertExists("staged/root.json")
-// tmp.assertEmpty("repository")
-// tmp.assertEmpty("staged/targets")
-//
-// // adding a non-existent file fails
-// c.Assert(r.AddTarget("foo.txt", nil), Equals, ErrFileNotFound{tmp.stagedTargetPath("foo.txt")})
-// tmp.assertEmpty("repository")
-//
-// // adding a file stages targets.json
-// tmp.writeStagedTarget("foo.txt", "foo")
-// c.Assert(r.AddTarget("foo.txt", nil), IsNil)
-// tmp.assertExists("staged/targets.json")
-// tmp.assertEmpty("repository")
-// t, err := r.targets()
-// c.Assert(err, IsNil)
-// c.Assert(t.Targets, HasLen, 1)
-// if _, ok := t.Targets["/foo.txt"]; !ok {
-// c.Fatal("missing target file: /foo.txt")
-// }
-//
-// // Snapshot() stages snapshot.json
-// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
-// tmp.assertExists("staged/snapshot.json")
-// tmp.assertEmpty("repository")
-//
-// // Timestamp() stages timestamp.json
-// c.Assert(r.Timestamp(), IsNil)
-// tmp.assertExists("staged/timestamp.json")
-// tmp.assertEmpty("repository")
-//
-// // committing moves files from staged -> repository
-// c.Assert(r.Commit(), IsNil)
-// tmp.assertExists("repository/root.json")
-// tmp.assertExists("repository/targets.json")
-// tmp.assertExists("repository/snapshot.json")
-// tmp.assertExists("repository/timestamp.json")
-// tmp.assertFileContent("repository/targets/foo.txt", "foo")
-// tmp.assertEmpty("staged/targets")
-// tmp.assertEmpty("staged")
-//
-// // adding and committing another file moves it into repository/targets
-// tmp.writeStagedTarget("path/to/bar.txt", "bar")
-// c.Assert(r.AddTarget("path/to/bar.txt", nil), IsNil)
-// tmp.assertExists("staged/targets.json")
-// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
-// c.Assert(r.Timestamp(), IsNil)
-// c.Assert(r.Commit(), IsNil)
-// tmp.assertFileContent("repository/targets/foo.txt", "foo")
-// tmp.assertFileContent("repository/targets/path/to/bar.txt", "bar")
-// tmp.assertEmpty("staged/targets")
-// tmp.assertEmpty("staged")
-//
-// // removing and committing a file removes it from repository/targets
-// c.Assert(r.RemoveTarget("foo.txt"), IsNil)
-// tmp.assertExists("staged/targets.json")
-// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
-// c.Assert(r.Timestamp(), IsNil)
-// c.Assert(r.Commit(), IsNil)
-// tmp.assertNotExist("repository/targets/foo.txt")
-// tmp.assertFileContent("repository/targets/path/to/bar.txt", "bar")
-// tmp.assertEmpty("staged/targets")
-// tmp.assertEmpty("staged")
-//}
-//
-//func (RepoSuite) TestConsistentSnapshot(c *C) {
-// tmp := newTmpDir(c)
-// local := FileSystemStore(tmp.path, nil)
-// r, err := NewRepo(local, "sha512", "sha256")
-// c.Assert(err, IsNil)
-//
-// genKey(c, r, "root")
-// genKey(c, r, "targets")
-// genKey(c, r, "snapshot")
-// genKey(c, r, "timestamp")
-// tmp.writeStagedTarget("foo.txt", "foo")
-// c.Assert(r.AddTarget("foo.txt", nil), IsNil)
-// tmp.writeStagedTarget("dir/bar.txt", "bar")
-// c.Assert(r.AddTarget("dir/bar.txt", nil), IsNil)
-// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
-// c.Assert(r.Timestamp(), IsNil)
-// c.Assert(r.Commit(), IsNil)
-//
-// hashes, err := r.fileHashes()
-// c.Assert(err, IsNil)
-//
-// // root.json, targets.json and snapshot.json should exist at both hashed and unhashed paths
-// for _, path := range []string{"root.json", "targets.json", "snapshot.json"} {
-// repoPath := filepath.Join("repository", path)
-// tmp.assertHashedFilesExist(repoPath, hashes[path])
-// tmp.assertExists(repoPath)
-// }
-//
-// // target files should exist at hashed but not unhashed paths
-// for _, path := range []string{"targets/foo.txt", "targets/dir/bar.txt"} {
-// repoPath := filepath.Join("repository", path)
-// tmp.assertHashedFilesExist(repoPath, hashes[path])
-// tmp.assertNotExist(repoPath)
-// }
-//
-// // timestamp.json should exist at an unhashed path (it doesn't have a hash)
-// tmp.assertExists("repository/timestamp.json")
-//
-// // removing a file should remove the hashed files
-// c.Assert(r.RemoveTarget("foo.txt"), IsNil)
-// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
-// c.Assert(r.Timestamp(), IsNil)
-// c.Assert(r.Commit(), IsNil)
-// tmp.assertHashedFilesNotExist("repository/targets/foo.txt", hashes["targets/foo.txt"])
-// tmp.assertNotExist("repository/targets/foo.txt")
-//
-// // targets should be returned by new repo
-// newRepo, err := NewRepo(local, "sha512", "sha256")
-// c.Assert(err, IsNil)
+func (RepoSuite) TestCommitFileSystem(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+	tmp := newTmpDir(c)
+	local := store.FileSystemStore(tmp.path, nil)
+	r, err := NewRepo(signer, local, "sha256")
+	c.Assert(err, IsNil)
+
+	// don't use consistent snapshots to make the checks simpler
+	c.Assert(r.Init(false), IsNil)
+
+	// generating keys should stage root.json and create repo dirs
+	genKey(c, r, "root")
+	genKey(c, r, "targets")
+	genKey(c, r, "snapshot")
+	genKey(c, r, "timestamp")
+	tmp.assertExists("staged/root.json")
+	tmp.assertEmpty("repository")
+	tmp.assertEmpty("staged/targets")
+
+	// adding a non-existent file fails
+	c.Assert(r.AddTarget("foo.txt", nil), Equals, tuferr.ErrFileNotFound{tmp.stagedTargetPath("foo.txt")})
+	tmp.assertEmpty("repository")
+
+	// adding a file stages targets.json
+	tmp.writeStagedTarget("foo.txt", "foo")
+	c.Assert(r.AddTarget("foo.txt", nil), IsNil)
+	tmp.assertExists("staged/targets.json")
+	tmp.assertEmpty("repository")
+	t, err := r.targets()
+	c.Assert(err, IsNil)
+	c.Assert(t.Targets, HasLen, 1)
+	if _, ok := t.Targets["/foo.txt"]; !ok {
+		c.Fatal("missing target file: /foo.txt")
+	}
+
+	// Snapshot() stages snapshot.json
+	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
+	tmp.assertExists("staged/snapshot.json")
+	tmp.assertEmpty("repository")
+
+	// Timestamp() stages timestamp.json
+	c.Assert(r.Timestamp(), IsNil)
+	tmp.assertExists("staged/timestamp.json")
+	tmp.assertEmpty("repository")
+
+	// committing moves files from staged -> repository
+	c.Assert(r.Commit(), IsNil)
+	tmp.assertExists("repository/root.json")
+	tmp.assertExists("repository/targets.json")
+	tmp.assertExists("repository/snapshot.json")
+	tmp.assertExists("repository/timestamp.json")
+	tmp.assertFileContent("repository/targets/foo.txt", "foo")
+	tmp.assertEmpty("staged/targets")
+	tmp.assertEmpty("staged")
+
+	// adding and committing another file moves it into repository/targets
+	tmp.writeStagedTarget("path/to/bar.txt", "bar")
+	c.Assert(r.AddTarget("path/to/bar.txt", nil), IsNil)
+	tmp.assertExists("staged/targets.json")
+	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
+	c.Assert(r.Timestamp(), IsNil)
+	c.Assert(r.Commit(), IsNil)
+	tmp.assertFileContent("repository/targets/foo.txt", "foo")
+	tmp.assertFileContent("repository/targets/path/to/bar.txt", "bar")
+	tmp.assertEmpty("staged/targets")
+	tmp.assertEmpty("staged")
+
+	// removing and committing a file removes it from repository/targets
+	c.Assert(r.RemoveTarget("foo.txt"), IsNil)
+	tmp.assertExists("staged/targets.json")
+	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
+	c.Assert(r.Timestamp(), IsNil)
+	c.Assert(r.Commit(), IsNil)
+	tmp.assertNotExist("repository/targets/foo.txt")
+	tmp.assertFileContent("repository/targets/path/to/bar.txt", "bar")
+	tmp.assertEmpty("staged/targets")
+	tmp.assertEmpty("staged")
+}
+
+func (RepoSuite) TestConsistentSnapshot(c *C) {
+	trust := signed.NewEd25519()
+	signer := signed.NewSigner(trust)
+	tmp := newTmpDir(c)
+	local := store.FileSystemStore(tmp.path, nil)
+	r, err := NewRepo(signer, local, "sha512", "sha256")
+	c.Assert(err, IsNil)
+
+	genKey(c, r, "root")
+	genKey(c, r, "targets")
+	genKey(c, r, "snapshot")
+	genKey(c, r, "timestamp")
+	tmp.writeStagedTarget("foo.txt", "foo")
+	c.Assert(r.AddTarget("foo.txt", nil), IsNil)
+	tmp.writeStagedTarget("dir/bar.txt", "bar")
+	c.Assert(r.AddTarget("dir/bar.txt", nil), IsNil)
+	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
+	c.Assert(r.Timestamp(), IsNil)
+	c.Assert(r.Commit(), IsNil)
+
+	hashes, err := r.fileHashes()
+	c.Assert(err, IsNil)
+
+	// root.json, targets.json and snapshot.json should exist at both hashed and unhashed paths
+	for _, path := range []string{"root.json", "targets.json", "snapshot.json"} {
+		repoPath := filepath.Join("repository", path)
+		tmp.assertHashedFilesExist(repoPath, hashes[path])
+		tmp.assertExists(repoPath)
+	}
+
+	// target files should exist at hashed but not unhashed paths
+	for _, path := range []string{"targets/foo.txt", "targets/dir/bar.txt"} {
+		repoPath := filepath.Join("repository", path)
+		tmp.assertHashedFilesExist(repoPath, hashes[path])
+		tmp.assertNotExist(repoPath)
+	}
+
+	// timestamp.json should exist at an unhashed path (it doesn't have a hash)
+	tmp.assertExists("repository/timestamp.json")
+
+	// removing a file should remove the hashed files
+	c.Assert(r.RemoveTarget("foo.txt"), IsNil)
+	c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
+	c.Assert(r.Timestamp(), IsNil)
+	c.Assert(r.Commit(), IsNil)
+	tmp.assertHashedFilesNotExist("repository/targets/foo.txt", hashes["targets/foo.txt"])
+	tmp.assertNotExist("repository/targets/foo.txt")
|
||||||
// t, err := newRepo.targets()
|
|
||||||
// c.Assert(err, IsNil)
|
// targets should be returned by new repo
|
||||||
// c.Assert(t.Targets, HasLen, 1)
|
newRepo, err := NewRepo(signer, local, "sha512", "sha256")
|
||||||
// if _, ok := t.Targets["/dir/bar.txt"]; !ok {
|
c.Assert(err, IsNil)
|
||||||
// c.Fatal("missing targets file: dir/bar.txt")
|
t, err := newRepo.targets()
|
||||||
// }
|
c.Assert(err, IsNil)
|
||||||
//}
|
c.Assert(t.Targets, HasLen, 1)
|
||||||
|
if _, ok := t.Targets["/dir/bar.txt"]; !ok {
|
||||||
|
c.Fatal("missing targets file: dir/bar.txt")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (RepoSuite) TestExpiresAndVersion(c *C) {
|
func (RepoSuite) TestExpiresAndVersion(c *C) {
|
||||||
|
trust := signed.NewEd25519()
|
||||||
|
signer := signed.NewSigner(trust)
|
||||||
|
|
||||||
//files := map[string][]byte{"/foo.txt": []byte("foo")}
|
//files := map[string][]byte{"/foo.txt": []byte("foo")}
|
||||||
db := util.GetSqliteDB()
|
db := util.GetSqliteDB()
|
||||||
defer util.FlushDB(db)
|
defer util.FlushDB(db)
|
||||||
local := store.DBStore(db, "")
|
local := store.DBStore(db, "")
|
||||||
r, err := NewRepo(local)
|
r, err := NewRepo(signer, local, "sha256")
|
||||||
c.Assert(err, IsNil)
|
c.Assert(err, IsNil)
|
||||||
|
|
||||||
past := time.Now().Add(-1 * time.Second)
|
past := time.Now().Add(-1 * time.Second)
|
||||||
|
@ -664,7 +689,7 @@ func (RepoSuite) TestExpiresAndVersion(c *C) {
|
||||||
r.SnapshotWithExpires(CompressionTypeNone, past),
|
r.SnapshotWithExpires(CompressionTypeNone, past),
|
||||||
r.TimestampWithExpires(past),
|
r.TimestampWithExpires(past),
|
||||||
} {
|
} {
|
||||||
c.Assert(err, Equals, ErrInvalidExpires{past})
|
c.Assert(err, Equals, tuferr.ErrInvalidExpires{past})
|
||||||
}
|
}
|
||||||
|
|
||||||
genKey(c, r, "root")
|
genKey(c, r, "root")
|
||||||
|
@ -736,6 +761,9 @@ func (RepoSuite) TestExpiresAndVersion(c *C) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (RepoSuite) TestHashAlgorithm(c *C) {
|
func (RepoSuite) TestHashAlgorithm(c *C) {
|
||||||
|
trust := signed.NewEd25519()
|
||||||
|
signer := signed.NewSigner(trust)
|
||||||
|
|
||||||
//files := map[string][]byte{"/foo.txt": []byte("foo")}
|
//files := map[string][]byte{"/foo.txt": []byte("foo")}
|
||||||
db := util.GetSqliteDB()
|
db := util.GetSqliteDB()
|
||||||
defer util.FlushDB(db)
|
defer util.FlushDB(db)
|
||||||
|
@ -750,7 +778,7 @@ func (RepoSuite) TestHashAlgorithm(c *C) {
|
||||||
{args: []string{"sha512", "sha256"}},
|
{args: []string{"sha512", "sha256"}},
|
||||||
} {
|
} {
|
||||||
// generate metadata with specific hash functions
|
// generate metadata with specific hash functions
|
||||||
r, err := NewRepo(local, test.args...)
|
r, err := NewRepo(signer, local, test.args...)
|
||||||
c.Assert(err, IsNil)
|
c.Assert(err, IsNil)
|
||||||
genKey(c, r, "root")
|
genKey(c, r, "root")
|
||||||
genKey(c, r, "targets")
|
genKey(c, r, "targets")
|
||||||
|
@ -851,67 +879,69 @@ func testPassphraseFunc(p []byte) util.PassphraseFunc {
|
||||||
// c.Assert(insecureStore.SaveKey("targets", key.SerializePrivate()), IsNil)
|
// c.Assert(insecureStore.SaveKey("targets", key.SerializePrivate()), IsNil)
|
||||||
// assertKeys("targets", false, []*keys.Key{key})
|
// assertKeys("targets", false, []*keys.Key{key})
|
||||||
//}
|
//}
|
||||||
//
|
|
||||||
//func (RepoSuite) TestManageMultipleTargets(c *C) {
|
func (RepoSuite) TestManageMultipleTargets(c *C) {
|
||||||
// tmp := newTmpDir(c)
|
trust := signed.NewEd25519()
|
||||||
// local := FileSystemStore(tmp.path, nil)
|
signer := signed.NewSigner(trust)
|
||||||
// r, err := NewRepo(local)
|
tmp := newTmpDir(c)
|
||||||
// c.Assert(err, IsNil)
|
local := store.FileSystemStore(tmp.path, nil)
|
||||||
// // don't use consistent snapshots to make the checks simpler
|
r, err := NewRepo(signer, local)
|
||||||
// c.Assert(r.Init(false), IsNil)
|
c.Assert(err, IsNil)
|
||||||
// genKey(c, r, "root")
|
// don't use consistent snapshots to make the checks simpler
|
||||||
// genKey(c, r, "targets")
|
c.Assert(r.Init(false), IsNil)
|
||||||
// genKey(c, r, "snapshot")
|
genKey(c, r, "root")
|
||||||
// genKey(c, r, "timestamp")
|
genKey(c, r, "targets")
|
||||||
//
|
genKey(c, r, "snapshot")
|
||||||
// assertRepoTargets := func(paths ...string) {
|
genKey(c, r, "timestamp")
|
||||||
// t, err := r.targets()
|
|
||||||
// c.Assert(err, IsNil)
|
assertRepoTargets := func(paths ...string) {
|
||||||
// for _, path := range paths {
|
t, err := r.targets()
|
||||||
// if _, ok := t.Targets[path]; !ok {
|
c.Assert(err, IsNil)
|
||||||
// c.Fatalf("missing target file: %s", path)
|
for _, path := range paths {
|
||||||
// }
|
if _, ok := t.Targets[path]; !ok {
|
||||||
// }
|
c.Fatalf("missing target file: %s", path)
|
||||||
// }
|
}
|
||||||
//
|
}
|
||||||
// // adding and committing multiple files moves correct targets from staged -> repository
|
}
|
||||||
// tmp.writeStagedTarget("foo.txt", "foo")
|
|
||||||
// tmp.writeStagedTarget("bar.txt", "bar")
|
// adding and committing multiple files moves correct targets from staged -> repository
|
||||||
// c.Assert(r.AddTargets([]string{"foo.txt", "bar.txt"}, nil), IsNil)
|
tmp.writeStagedTarget("foo.txt", "foo")
|
||||||
// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
|
tmp.writeStagedTarget("bar.txt", "bar")
|
||||||
// c.Assert(r.Timestamp(), IsNil)
|
c.Assert(r.AddTargets([]string{"foo.txt", "bar.txt"}, nil), IsNil)
|
||||||
// c.Assert(r.Commit(), IsNil)
|
c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
|
||||||
// assertRepoTargets("/foo.txt", "/bar.txt")
|
c.Assert(r.Timestamp(), IsNil)
|
||||||
// tmp.assertExists("repository/targets/foo.txt")
|
c.Assert(r.Commit(), IsNil)
|
||||||
// tmp.assertExists("repository/targets/bar.txt")
|
assertRepoTargets("/foo.txt", "/bar.txt")
|
||||||
//
|
tmp.assertExists("repository/targets/foo.txt")
|
||||||
// // adding all targets moves them all from staged -> repository
|
tmp.assertExists("repository/targets/bar.txt")
|
||||||
// count := 10
|
|
||||||
// files := make([]string, count)
|
// adding all targets moves them all from staged -> repository
|
||||||
// for i := 0; i < count; i++ {
|
count := 10
|
||||||
// files[i] = fmt.Sprintf("/file%d.txt", i)
|
files := make([]string, count)
|
||||||
// tmp.writeStagedTarget(files[i], "data")
|
for i := 0; i < count; i++ {
|
||||||
// }
|
files[i] = fmt.Sprintf("/file%d.txt", i)
|
||||||
// c.Assert(r.AddTargets(nil, nil), IsNil)
|
tmp.writeStagedTarget(files[i], "data")
|
||||||
// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
|
}
|
||||||
// c.Assert(r.Timestamp(), IsNil)
|
c.Assert(r.AddTargets(nil, nil), IsNil)
|
||||||
// c.Assert(r.Commit(), IsNil)
|
c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
|
||||||
// tmp.assertExists("repository/targets/foo.txt")
|
c.Assert(r.Timestamp(), IsNil)
|
||||||
// tmp.assertExists("repository/targets/bar.txt")
|
c.Assert(r.Commit(), IsNil)
|
||||||
// assertRepoTargets(files...)
|
tmp.assertExists("repository/targets/foo.txt")
|
||||||
// for _, file := range files {
|
tmp.assertExists("repository/targets/bar.txt")
|
||||||
// tmp.assertExists("repository/targets/" + file)
|
assertRepoTargets(files...)
|
||||||
// }
|
for _, file := range files {
|
||||||
// tmp.assertEmpty("staged/targets")
|
tmp.assertExists("repository/targets/" + file)
|
||||||
// tmp.assertEmpty("staged")
|
}
|
||||||
//
|
tmp.assertEmpty("staged/targets")
|
||||||
// // removing all targets removes them from the repository and targets.json
|
tmp.assertEmpty("staged")
|
||||||
// c.Assert(r.RemoveTargets(nil), IsNil)
|
|
||||||
// c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
|
// removing all targets removes them from the repository and targets.json
|
||||||
// c.Assert(r.Timestamp(), IsNil)
|
c.Assert(r.RemoveTargets(nil), IsNil)
|
||||||
// c.Assert(r.Commit(), IsNil)
|
c.Assert(r.Snapshot(CompressionTypeNone), IsNil)
|
||||||
// tmp.assertEmpty("repository/targets")
|
c.Assert(r.Timestamp(), IsNil)
|
||||||
// t, err := r.targets()
|
c.Assert(r.Commit(), IsNil)
|
||||||
// c.Assert(err, IsNil)
|
tmp.assertEmpty("repository/targets")
|
||||||
// c.Assert(t.Targets, HasLen, 0)
|
t, err := r.targets()
|
||||||
//}
|
c.Assert(err, IsNil)
|
||||||
|
c.Assert(t.Targets, HasLen, 0)
|
||||||
|
}
|
||||||
|
|
|
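The re-enabled tests now build a repo by wiring an in-memory ed25519 trust service into the generic Signer and handing that to NewRepo ahead of the store. A minimal sketch of that construction pattern from outside the package, assuming the constructor order and import paths used by these tests (the temp-dir setup is illustrative only):

package main

import (
    "io/ioutil"
    "log"

    tuf "github.com/endophage/go-tuf"
    "github.com/endophage/go-tuf/signed"
    "github.com/endophage/go-tuf/store"
)

func main() {
    // Wire an in-memory ed25519 trust service into the generic Signer.
    trust := signed.NewEd25519()
    signer := signed.NewSigner(trust)

    // A filesystem-backed local store, as the tests use.
    dir, err := ioutil.TempDir("", "tuf-repo")
    if err != nil {
        log.Fatal(err)
    }
    local := store.FileSystemStore(dir, nil)

    // NewRepo now takes the signer before the store and the hash algorithms.
    repo, err := tuf.NewRepo(signer, local, "sha256")
    if err != nil {
        log.Fatal(err)
    }
    _ = repo
}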
@@ -5,7 +5,16 @@ import (
    "github.com/endophage/go-tuf/keys"
)

-type Signer interface {
-   GetPublicKeys(keyIDs ...string) (map[string]keys.Key, error)
-   Sign(keyIDs []string, data json.RawMessage) ([]data.Signature, error)
+type SigningService interface {
+   Sign(keyIDs []string, data []byte) ([]data.Signature, error)
+}
+
+type KeyService interface {
+   Create() (*keys.PublicKey, error)
+   PublicKeys(keyIDs ...string) (map[string]*keys.PublicKey, error)
+}
+
+type TrustService interface {
+   SigningService
+   KeyService
}
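The old monolithic Signer interface is split into SigningService and KeyService, composed into TrustService. A minimal sketch of a caller that exercises both halves through the composed interface; the helper name and the main wiring are illustrative, only the interface methods come from this hunk:

package main

import (
    "fmt"

    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/signed"
)

// signWithFreshKey uses the KeyService side to create a key, then the
// SigningService side to sign a payload with it.
func signWithFreshKey(svc signed.TrustService, payload []byte) ([]data.Signature, error) {
    pub, err := svc.Create()
    if err != nil {
        return nil, err
    }
    return svc.Sign([]string{pub.ID}, payload)
}

func main() {
    sigs, err := signWithFreshKey(signed.NewEd25519(), []byte(`{"example":true}`))
    if err != nil {
        fmt.Println("sign failed:", err)
        return
    }
    fmt.Println("signatures:", len(sigs))
}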
@@ -1,40 +1,78 @@
package signed

import (
-   "github.com/agl/ed25519"
+   cjson "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/tent/canonical-json-go"

    "github.com/endophage/go-tuf/data"
-   cjson "github.com/tent/canonical-json-go"
+   "github.com/endophage/go-tuf/keys"
)

// Sign takes a data.Signed and a key, calculated and adds the signature
// to the data.Signed
-func Sign(s *data.Signed, k *data.Key) {
-   id := k.ID()
+//func Sign(s *data.Signed, k *data.Key) {
+// id := k.ID()
+// signatures := make([]data.Signature, 0, len(s.Signatures)+1)
+// for _, sig := range s.Signatures {
+//     if sig.KeyID == id {
+//         continue
+//     }
+//     signatures = append(signatures, sig)
+// }
+// priv := [ed25519.PrivateKeySize]byte{}
+// copy(priv[:], k.Value.Private)
+// sig := ed25519.Sign(&priv, s.Signed)
+// s.Signatures = append(signatures, data.Signature{
+//     KeyID:     id,
+//     Method:    "ed25519",
+//     Signature: sig[:],
+// })
+//}

+// Signer encapsulates a signing service with some convenience methods to
+// interface between TUF keys and the generic service interface
+type Signer struct {
+   service TrustService
+}
+
+func NewSigner(service TrustService) *Signer {
+   return &Signer{service}
+}
+
+// Sign takes a data.Signed and a key, calculated and adds the signature
+// to the data.Signed
+func (signer *Signer) Sign(s *data.Signed, keys ...*keys.PublicKey) error {
    signatures := make([]data.Signature, 0, len(s.Signatures)+1)
+   keyIDMemb := make(map[string]struct{})
+   keyIDs := make([]string, 0, len(keys))
+   for _, key := range keys {
+       keyIDMemb[key.ID] = struct{}{}
+       keyIDs = append(keyIDs, key.ID)
+   }
    for _, sig := range s.Signatures {
-       if sig.KeyID == id {
+       if _, ok := keyIDMemb[sig.KeyID]; ok {
            continue
        }
        signatures = append(signatures, sig)
    }
-   priv := [ed25519.PrivateKeySize]byte{}
-   copy(priv[:], k.Value.Private)
-   sig := ed25519.Sign(&priv, s.Signed)
-   s.Signatures = append(signatures, data.Signature{
-       KeyID:     id,
-       Method:    "ed25519",
-       Signature: sig[:],
-   })
+   newSigs, err := signer.service.Sign(keyIDs, s.Signed)
+   if err != nil {
+       return err
+   }
+   s.Signatures = append(signatures, newSigs...)
+   return nil
}

-func Marshal(v interface{}, keys ...*data.Key) (*data.Signed, error) {
+func (signer *Signer) Marshal(v interface{}, keys ...*keys.PublicKey) (*data.Signed, error) {
    b, err := cjson.Marshal(v)
    if err != nil {
        return nil, err
    }
    s := &data.Signed{Signed: b}
-   for _, k := range keys {
-       Sign(s, k)
-   }
-   return s, nil
+   err = signer.Sign(s, keys...)
+   return s, err // err may be nil but there's no point in checking, just return it
+}
+
+func (signer *Signer) NewKey() (*keys.PublicKey, error) {
+   return signer.service.Create()
}
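With Signer wrapping a TrustService, callers no longer touch raw private keys: the service creates keys and produces signatures, and Signer.Marshal canonical-JSON-encodes a value and attaches them. A small sketch of that flow, assuming the exported names in this commit:

package main

import (
    "fmt"

    "github.com/endophage/go-tuf/signed"
)

func main() {
    trust := signed.NewEd25519()
    signer := signed.NewSigner(trust)

    // Create a key inside the trust service, then marshal-and-sign a value.
    key, err := trust.Create()
    if err != nil {
        panic(err)
    }
    s, err := signer.Marshal(map[string]string{"_type": "example"}, key)
    if err != nil {
        panic(err)
    }
    fmt.Printf("payload carries %d signature(s)\n", len(s.Signatures))
}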
Godeps/_workspace/src/github.com/endophage/go-tuf/signed/sign_test.go (new file, 113 lines, generated/vendored)
@@ -0,0 +1,113 @@
package signed

import (
    "testing"

    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/keys"
)

type MockTrustService struct {
    testSig data.Signature
    testKey keys.PublicKey
}

func (mts *MockTrustService) Sign(keyIDs []string, data []byte) ([]data.Signature, error) {
    sigs := []data.Signature{mts.testSig}
    return sigs, nil
}

func (mts *MockTrustService) Create(keyType string) (keys.PublicKey, error) {
    return keys.PublicKey{mts.testKey}, nil
}

func (mts *MockTrustService) PublicKeys(keyIDs ...string) (map[string]keys.PublicKey, error) {
    keys := map[string]keys.PublicKey{"testID": mts.testKey}
    return keys, nil
}

var _ TrustService = &MockTrustService{}

// Test signing and ensure the expected signature is added
func TestBasicSign(t *testing.T) {
    signer := Signer{&MockTrustService{
        testSig: data.Signature{KeyID: "testID"},
        testKey: keys.PublicKey{},
    }}
    key := keys.PublicKey{}
    testData := data.Signed{}

    signer.Sign(&testData, &key)

    if len(testData.Signatures) != 1 {
        t.Fatalf("Incorrect number of signatures: %d", len(testData.Signatures))
    }

    if testData.Signatures[0].KeyID != "testID" {
        t.Fatalf("Wrong signature ID returned: %s", testData.Signatures[0].KeyID)
    }
}

// Test signing with the same key multiple times only registers a single signature
// for the key (N.B. MockTrustService.Sign will still be called again, but Signer.Sign
// should be cleaning previous signatures by the KeyID when asked to sign again)
func TestReSign(t *testing.T) {
    signer := Signer{&MockTrustService{
        testSig: data.Signature{KeyID: "testID"},
        testKey: keys.PublicKey{},
    }}
    key := keys.PublicKey{ID: "testID"}
    testData := data.Signed{}

    signer.Sign(&testData, &key)
    signer.Sign(&testData, &key)

    if len(testData.Signatures) != 1 {
        t.Fatalf("Incorrect number of signatures: %d", len(testData.Signatures))
    }

    if testData.Signatures[0].KeyID != "testID" {
        t.Fatalf("Wrong signature ID returned: %s", testData.Signatures[0].KeyID)
    }
}

func TestMultiSign(t *testing.T) {
    signer := Signer{&MockTrustService{
        testSig: data.Signature{KeyID: "testID"},
        testKey: keys.PublicKey{},
    }}
    key := keys.PublicKey{ID: "testID1"}
    testData := data.Signed{}

    signer.Sign(&testData, &key)

    key = keys.PublicKey{ID: "testID2"}
    signer.Sign(&testData, &key)

    if len(testData.Signatures) != 2 {
        t.Fatalf("Incorrect number of signatures: %d", len(testData.Signatures))
    }

    keyIDs := map[string]struct{}{"testID1": struct{}{}, "testID2": struct{}{}}
    for _, sig := range testData.Signatures {
        if _, ok := keyIDs[sig.KeyID]; !ok {
            t.Fatalf("Got a signature we didn't expect: %s", sig.KeyID)
        }
    }
}

func TestNewKey(t *testing.T) {
    signer := Signer{&MockTrustService{
        testSig: data.Signature{},
        testKey: keys.PublicKey{ID: "testID"},
    }}

    key := signer.NewKey("testType")

    if key.ID != "testID" {
        t.Fatalf("Expected key ID not found: %s", key.ID)
    }
}
@@ -0,0 +1,89 @@
package signed

import (
    "crypto/rand"

    "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/agl/ed25519"
    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/keys"
)

// Ed25519 implements a simple in memory keystore and trust service
type Ed25519 struct {
    keys map[string]*keys.PrivateKey
}

var _ TrustService = &Ed25519{}

func NewEd25519() *Ed25519 {
    return &Ed25519{
        make(map[string]*keys.PrivateKey),
    }
}

// addKey allows you to add a private key to the trust service
func (trust *Ed25519) addKey(k *keys.PrivateKey) {
    key := keys.PrivateKey{
        PublicKey: keys.PublicKey{
            Key: data.Key{
                Type: k.Type,
                Value: data.KeyValue{
                    Public: make([]byte, len(k.Value.Public)),
                },
            },
            ID: k.ID,
        },
        Private: make([]byte, len(k.Private)),
    }

    copy(key.Value.Public, k.Value.Public)
    copy(key.Private, k.Private)
    trust.keys[k.ID] = &key
}

func (trust *Ed25519) RemoveKey(keyID string) {
    delete(trust.keys, keyID)
}

func (trust *Ed25519) Sign(keyIDs []string, toSign []byte) ([]data.Signature, error) {
    signatures := make([]data.Signature, 0, len(keyIDs))
    for _, kID := range keyIDs {
        priv := [ed25519.PrivateKeySize]byte{}
        pub := [ed25519.PublicKeySize]byte{}
        copy(priv[:], trust.keys[kID].Private)
        copy(pub[:], trust.keys[kID].Value.Public)
        sig := ed25519.Sign(&priv, toSign)
        signatures = append(signatures, data.Signature{
            KeyID:     kID,
            Method:    "ed25519",
            Signature: sig[:],
        })
    }
    return signatures, nil
}

func (trust *Ed25519) Create() (*keys.PublicKey, error) {
    pub, priv, err := ed25519.GenerateKey(rand.Reader)
    if err != nil {
        return nil, err
    }
    pubBytes := make([]byte, ed25519.PublicKeySize)
    copy(pubBytes, pub[:])
    privBytes := make([]byte, ed25519.PrivateKeySize)
    copy(privBytes, priv[:])
    public := keys.NewPublicKey("ed25519", pubBytes)
    private := keys.PrivateKey{*public, privBytes}
    trust.addKey(&private)
    return public, nil
}

func (trust *Ed25519) PublicKeys(keyIDs ...string) (map[string]*keys.PublicKey, error) {
    k := make(map[string]*keys.PublicKey)
    for _, kID := range keyIDs {
        if key, ok := trust.keys[kID]; ok {
            k[kID] = &key.PublicKey
        }
    }
    return k, nil
}
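A quick create/sign/verify roundtrip against this in-memory service, as a sketch outside the commit. The fixed-size array copies mirror what VerifySignatures does in the next hunk, since the vendored agl/ed25519 package expects array pointers while keys and signatures are stored as []byte:

package main

import (
    "fmt"

    "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/agl/ed25519"
    "github.com/endophage/go-tuf/signed"
)

func main() {
    trust := signed.NewEd25519()

    // Create an ed25519 keypair inside the service; only the public half is returned.
    pub, err := trust.Create()
    if err != nil {
        panic(err)
    }

    msg := []byte("hello tuf")
    sigs, err := trust.Sign([]string{pub.ID}, msg)
    if err != nil {
        panic(err)
    }

    // ed25519.Verify wants fixed-size arrays, so copy the []byte key and
    // signature into them before verifying.
    var keyBytes [ed25519.PublicKeySize]byte
    var sigBytes [ed25519.SignatureSize]byte
    copy(keyBytes[:], pub.Value.Public)
    copy(sigBytes[:], sigs[0].Signature)
    fmt.Println("verified:", ed25519.Verify(&keyBytes, msg, &sigBytes))
}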
@@ -6,10 +6,10 @@ import (
    "strings"
    "time"

-   "github.com/agl/ed25519"
+   "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/agl/ed25519"
+   "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/tent/canonical-json-go"
    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/keys"
-   "github.com/tent/canonical-json-go"
)

var (

@@ -74,8 +74,8 @@ func VerifySignatures(s *data.Signed, role string, db *keys.DB) error {
    }

    valid := make(map[string]struct{})
-   var sigBytes [ed25519.SignatureSize]byte
    for _, sig := range s.Signatures {
+       var sigBytes [ed25519.SignatureSize]byte
        if sig.Method != "ed25519" {
            return ErrWrongMethod
        }

@@ -92,7 +92,9 @@ func VerifySignatures(s *data.Signed, role string, db *keys.DB) error {
        }

        copy(sigBytes[:], sig.Signature)
-       if !ed25519.Verify(&key.Public, msg, &sigBytes) {
+       var keyBytes [32]byte
+       copy(keyBytes[:], key.Value.Public)
+       if !ed25519.Verify(&keyBytes, msg, &sigBytes) {
            return ErrInvalid
        }
        valid[sig.KeyID] = struct{}{}
@@ -4,11 +4,11 @@ import (
    "testing"
    "time"

-   "github.com/agl/ed25519"
+   "github.com/endophage/go-tuf/Godeps/_workspace/src/github.com/agl/ed25519"
    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/keys"

-   . "gopkg.in/check.v1"
+   . "github.com/endophage/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
)

// Hook up gocheck into the "go test" runner.
@@ -1,7 +1,6 @@
package store

import (
-   "bytes"
    "encoding/json"
    "io"
    "io/ioutil"

@@ -11,75 +10,16 @@ import (
    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/encrypted"
+   "github.com/endophage/go-tuf/errors"
    "github.com/endophage/go-tuf/util"
)

+// topLevelManifests determines the order signatures are verified when committing.
+var topLevelManifests = []string{
+   "root.json",
+   "targets.json",
+   "snapshot.json",
+   "timestamp.json",
+}
(the in-file MemoryStore implementation, with its GetMeta, SetMeta, WalkStagedTargets, Commit, GetKeys, SaveKey and Clean methods, is removed from this file; it moves to the new store/memorystore.go shown further below)

type persistedKeys struct {

@@ -171,7 +111,11 @@ func (f *fileSystemStore) WalkStagedTargets(paths []string, targetsFn targetsWal
            return err
        }
        defer file.Close()
-       return targetsFn(rel, file)
+       meta, err := util.GenerateFileMeta(file, "sha256")
+       if err != nil {
+           return err
+       }
+       return targetsFn(rel, meta)
    }
    return filepath.Walk(filepath.Join(f.stagedDir(), "targets"), walkFunc)
}

@@ -181,7 +125,7 @@ func (f *fileSystemStore) WalkStagedTargets(paths []string, targetsFn targetsWal
        realPath := filepath.Join(f.stagedDir(), "targets", path)
        if _, err := os.Stat(realPath); err != nil {
            if os.IsNotExist(err) {
-               return ErrFileNotFound{realPath}
+               return errors.ErrFileNotFound{realPath}
            }
            return err
        }

@@ -192,11 +136,15 @@ func (f *fileSystemStore) WalkStagedTargets(paths []string, targetsFn targetsWal
        file, err := os.Open(realPath)
        if err != nil {
            if os.IsNotExist(err) {
-               return ErrFileNotFound{realPath}
+               return errors.ErrFileNotFound{realPath}
            }
            return err
        }
-       err = targetsFn(path, file)
+       meta, err := util.GenerateFileMeta(file, "sha256")
+       if err != nil {
+           return err
+       }
+       err = targetsFn(path, meta)
        file.Close()
        if err != nil {
            return err

@@ -382,7 +330,7 @@ func (f *fileSystemStore) loadKeys(role string) ([]*data.Key, []byte, error) {
    // the keys are encrypted so cannot be loaded if passphraseFunc is not set
    if f.passphraseFunc == nil {
-       return nil, nil, ErrPassphraseRequired{role}
+       return nil, nil, errors.ErrPassphraseRequired{role}
    }

    pass, err := f.passphraseFunc(role, false)
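The stores now hand the walk callback the output of util.GenerateFileMeta rather than an open reader. A small sketch of what that helper produces; the Length and Hashes field names follow upstream go-tuf's data.FileMeta and should be treated as assumptions here:

package main

import (
    "bytes"
    "encoding/hex"
    "fmt"

    "github.com/endophage/go-tuf/util"
)

func main() {
    // GenerateFileMeta consumes the reader and returns the length plus a hash
    // per requested algorithm; this is the value passed to targetsFn now.
    meta, err := util.GenerateFileMeta(bytes.NewReader([]byte("foo")), "sha256")
    if err != nil {
        panic(err)
    }
    fmt.Println("length:", meta.Length)
    fmt.Println("sha256:", hex.EncodeToString(meta.Hashes["sha256"]))
}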
Godeps/_workspace/src/github.com/endophage/go-tuf/store/memorystore.go (new file, 86 lines, generated/vendored)
@@ -0,0 +1,86 @@
package store

import (
    "bytes"
    "encoding/json"

    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/errors"
    "github.com/endophage/go-tuf/util"
)

func MemoryStore(meta map[string]json.RawMessage, files map[string][]byte) LocalStore {
    if meta == nil {
        meta = make(map[string]json.RawMessage)
    }
    return &memoryStore{
        meta:  meta,
        files: files,
        keys:  make(map[string][]*data.Key),
    }
}

type memoryStore struct {
    meta  map[string]json.RawMessage
    files map[string][]byte
    keys  map[string][]*data.Key
}

func (m *memoryStore) GetMeta() (map[string]json.RawMessage, error) {
    return m.meta, nil
}

func (m *memoryStore) SetMeta(name string, meta json.RawMessage) error {
    m.meta[name] = meta
    return nil
}

func (m *memoryStore) WalkStagedTargets(paths []string, targetsFn targetsWalkFunc) error {
    if len(paths) == 0 {
        for path, data := range m.files {
            meta, err := util.GenerateFileMeta(bytes.NewReader(data), "sha256")
            if err != nil {
                return err
            }
            if err = targetsFn(path, meta); err != nil {
                return err
            }
        }
        return nil
    }

    for _, path := range paths {
        data, ok := m.files[path]
        if !ok {
            return errors.ErrFileNotFound{path}
        }
        meta, err := util.GenerateFileMeta(bytes.NewReader(data), "sha256")
        if err != nil {
            return err
        }
        if err = targetsFn(path, meta); err != nil {
            return err
        }
    }
    return nil
}

func (m *memoryStore) Commit(map[string]json.RawMessage, bool, map[string]data.Hashes) error {
    return nil
}

func (m *memoryStore) GetKeys(role string) ([]*data.Key, error) {
    return m.keys[role], nil
}

func (m *memoryStore) SaveKey(role string, key *data.Key) error {
    if _, ok := m.keys[role]; !ok {
        m.keys[role] = make([]*data.Key, 0)
    }
    m.keys[role] = append(m.keys[role], key)
    return nil
}

func (m *memoryStore) Clean() error {
    return nil
}
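A sketch of driving the new memory store's WalkStagedTargets from outside the package. The callback signature, a path plus the data.FileMeta produced by util.GenerateFileMeta, is inferred from this file rather than shown explicitly in the diff, so treat it as an assumption:

package main

import (
    "fmt"

    "github.com/endophage/go-tuf/data"
    "github.com/endophage/go-tuf/store"
)

func main() {
    files := map[string][]byte{"foo.txt": []byte("foo")}
    local := store.MemoryStore(nil, files)

    // With no paths given, the walk visits every staged file and hands the
    // callback its pre-computed metadata instead of an open reader.
    err := local.WalkStagedTargets(nil, func(path string, meta data.FileMeta) error {
        fmt.Println(path, meta.Length)
        return nil
    })
    if err != nil {
        panic(err)
    }
}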
@@ -5,8 +5,8 @@ import (
    "encoding/hex"
    "testing"

+   . "github.com/endophage/go-tuf/Godeps/_workspace/src/gopkg.in/check.v1"
    "github.com/endophage/go-tuf/data"
-   . "gopkg.in/check.v1"
)

// Hook up gocheck into the "go test" runner.