mirror of https://github.com/kubernetes/kops.git
Merge pull request #8171 from tanjunchen/staticcheck001
util/pkg/vfs/: staticcheck
commit 2376814575
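For context: staticcheck's identifier-naming check (likely ST1003 from its stylecheck ruleset) flags Go names that lower-case initialisms such as HTTP and ACL or that contain underscores, which is exactly what every hunk below fixes. A minimal sketch of the before/after pattern, using names that echo the diff rather than the real kops declarations:

package naming

// Illustrative sketch (not kops code) of the renames this commit applies:
// initialisms are written in full caps and constant names drop underscores.

type ChangeType string

const (
	ChangeTypeAdded    ChangeType = "ADDED"    // was ChangeType_Added
	ChangeTypeRemoved  ChangeType = "REMOVED"  // was ChangeType_Removed
	ChangeTypeModified ChangeType = "MODIFIED" // was ChangeType_Modified
)

// readHTTPLocation is the conforming spelling of readHttpLocation; only the
// name changes, the signature and behavior stay the same.
func readHTTPLocation(httpURL string, httpHeaders map[string]string) ([]byte, error) {
	_, _ = httpURL, httpHeaders
	return nil, nil
}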
@@ -100,22 +100,22 @@ func (c *VFSContext) ReadFile(location string, options ...VFSOption) ([]byte, error) {
 			httpURL := "http://169.254.169.254/computeMetadata/v1/instance/attributes/" + u.Path
 			httpHeaders := make(map[string]string)
 			httpHeaders["Metadata-Flavor"] = "Google"
-			return c.readHttpLocation(httpURL, httpHeaders, opts)
+			return c.readHTTPLocation(httpURL, httpHeaders, opts)
 		case "aws":
 			httpURL := "http://169.254.169.254/latest/" + u.Path
-			return c.readHttpLocation(httpURL, nil, opts)
+			return c.readHTTPLocation(httpURL, nil, opts)
 		case "digitalocean":
 			httpURL := "http://169.254.169.254/metadata/v1" + u.Path
-			return c.readHttpLocation(httpURL, nil, opts)
+			return c.readHTTPLocation(httpURL, nil, opts)
 		case "alicloud":
 			httpURL := "http://100.100.100.200/latest/meta-data/" + u.Path
-			return c.readHttpLocation(httpURL, nil, opts)
+			return c.readHTTPLocation(httpURL, nil, opts)
 		default:
 			return nil, fmt.Errorf("unknown metadata type: %q in %q", u.Host, location)
 		}
 
 	case "http", "https":
-		return c.readHttpLocation(location, nil, opts)
+		return c.readHTTPLocation(location, nil, opts)
 	}
 }
@@ -169,10 +169,10 @@ func (c *VFSContext) BuildVfsPath(p string) (Path, error) {
 	return nil, fmt.Errorf("unknown / unhandled path type: %q", p)
 }
 
-// readHttpLocation reads an http (or https) url.
+// readHTTPLocation reads an http (or https) url.
 // It returns the contents, or an error on any non-200 response. On a 404, it will return os.ErrNotExist
 // It will retry a few times on a 500 class error
-func (c *VFSContext) readHttpLocation(httpURL string, httpHeaders map[string]string, opts vfsOptions) ([]byte, error) {
+func (c *VFSContext) readHTTPLocation(httpURL string, httpHeaders map[string]string, opts vfsOptions) ([]byte, error) {
 	var body []byte
 
 	done, err := RetryWithBackoff(opts.backoff, func() (bool, error) {
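The doc comment above states the helper's contract: return the body on a 200, map a 404 to os.ErrNotExist, and retry 500-class responses. The sketch below illustrates that contract with a plain fixed-count retry loop instead of kops's RetryWithBackoff helper; the function name and its details are illustrative, not the real implementation.

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"os"
	"time"
)

// readHTTPLocationSketch is an illustrative stand-in for the real helper:
// it returns the body on 200, os.ErrNotExist on 404, and retries a few
// times on network failures and 5xx responses before giving up.
func readHTTPLocationSketch(httpURL string, httpHeaders map[string]string) ([]byte, error) {
	var lastErr error
	for attempt := 0; attempt < 3; attempt++ {
		req, err := http.NewRequest("GET", httpURL, nil)
		if err != nil {
			return nil, err
		}
		for k, v := range httpHeaders {
			req.Header.Set(k, v)
		}

		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			lastErr = err // transient network error: retry
			time.Sleep(time.Second)
			continue
		}
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			lastErr = err
			time.Sleep(time.Second)
			continue
		}

		switch {
		case resp.StatusCode == 200:
			return body, nil
		case resp.StatusCode == 404:
			return nil, os.ErrNotExist
		case resp.StatusCode >= 500:
			lastErr = fmt.Errorf("server error %d from %q", resp.StatusCode, httpURL)
			time.Sleep(time.Second)
			continue
		default:
			return nil, fmt.Errorf("unexpected status %d from %q", resp.StatusCode, httpURL)
		}
	}
	return nil, lastErr
}

func main() {
	if _, err := readHTTPLocationSketch("http://169.254.169.254/latest/meta-data/", nil); err != nil {
		fmt.Println("read failed:", err)
	}
}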
@@ -149,11 +149,11 @@ func (p *GSPath) WriteFile(data io.ReadSeeker, acl ACL) error {
 		}
 
 		if acl != nil {
-			gsAcl, ok := acl.(*GSAcl)
+			gsACL, ok := acl.(*GSAcl)
 			if !ok {
 				return true, fmt.Errorf("write to %s with ACL of unexpected type %T", p, acl)
 			}
-			obj.Acl = gsAcl.Acl
+			obj.Acl = gsACL.Acl
 		}
 
 		if _, err := data.Seek(0, 0); err != nil {
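The GS hunk above (and the SSH hunk that follows) uses the same guard: the generic ACL value is type-asserted to the backend-specific type (now spelled gsACL / sshACL) before its fields are used. A minimal sketch of that pattern with hypothetical types, not the kops definitions:

package main

import "fmt"

// ACL is a marker interface; each storage backend defines its own concrete type.
type ACL interface{}

// GCSACL is a hypothetical backend-specific ACL carrying provider settings.
type GCSACL struct {
	Entity string
	Role   string
}

// writeWithACL shows the guard used in the diff: accept the generic ACL,
// then require the concrete type this backend understands.
func writeWithACL(acl ACL) error {
	if acl == nil {
		return nil // no ACL requested
	}
	gcsACL, ok := acl.(*GCSACL)
	if !ok {
		return fmt.Errorf("write with ACL of unexpected type %T", acl)
	}
	fmt.Printf("applying ACL %s=%s\n", gcsACL.Entity, gcsACL.Role)
	return nil
}

func main() {
	if err := writeWithACL(&GCSACL{Entity: "allUsers", Role: "READER"}); err != nil {
		fmt.Println(err)
	}
}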
@@ -175,11 +175,11 @@ func (p *SSHPath) WriteFile(data io.ReadSeeker, acl ACL) error {
 
 	if err == nil {
 		if acl != nil {
-			sshAcl, ok := acl.(*SSHAcl)
+			sshACL, ok := acl.(*SSHAcl)
 			if !ok {
 				err = fmt.Errorf("unexpected acl type %T", acl)
 			} else {
-				err = sftpClient.Chmod(tempfile, sshAcl.Mode)
+				err = sftpClient.Chmod(tempfile, sshACL.Mode)
 				if err != nil {
 					err = fmt.Errorf("error during chmod of %q: %v", tempfile, err)
 				}
@@ -38,9 +38,9 @@ func NewVFSScan(base Path) *VFSScan {
 type ChangeType string
 
 const (
-	ChangeType_Added    ChangeType = "ADDED"
-	ChangeType_Removed  ChangeType = "REMOVED"
-	ChangeType_Modified ChangeType = "MODIFIED"
+	ChangeTypeAdded    ChangeType = "ADDED"
+	ChangeTypeRemoved  ChangeType = "REMOVED"
+	ChangeTypeModified ChangeType = "MODIFIED"
 )
 
 type Change struct {
@@ -49,7 +49,7 @@ type Change struct {
 	Hash *hashing.Hash
 }
 
-// Scans for changes files. On the first call will return all files as ChangeType_Added.
+// Scans for changes files. On the first call will return all files as ChangeTypeAdded.
 // On subsequent calls will return any changed files (using their hashes)
 func (v *VFSScan) Scan() ([]Change, error) {
 	allFiles, err := v.Base.ReadTree()
@@ -79,7 +79,7 @@ func (v *VFSScan) Scan() ([]Change, error) {
 		var changes []Change
 		for k, f := range files {
 			hash := hashes[k]
-			changes = append(changes, Change{ChangeType: ChangeType_Added, Path: f, Hash: hash})
+			changes = append(changes, Change{ChangeType: ChangeTypeAdded, Path: f, Hash: hash})
 		}
 		return changes, nil
 	}
@@ -90,9 +90,9 @@ func (v *VFSScan) Scan() ([]Change, error) {
 		newHash := hashes[k]
 
 		if oldHash == nil {
-			changes = append(changes, Change{ChangeType: ChangeType_Added, Path: f, Hash: newHash})
+			changes = append(changes, Change{ChangeType: ChangeTypeAdded, Path: f, Hash: newHash})
 		} else if !oldHash.Equal(newHash) {
-			changes = append(changes, Change{ChangeType: ChangeType_Modified, Path: f, Hash: newHash})
+			changes = append(changes, Change{ChangeType: ChangeTypeModified, Path: f, Hash: newHash})
 		}
 	}
 
@@ -100,7 +100,7 @@ func (v *VFSScan) Scan() ([]Change, error) {
 		newHash := hashes[k]
 		f := files[k]
 		if newHash == nil {
-			changes = append(changes, Change{ChangeType: ChangeType_Removed, Path: f, Hash: newHash})
+			changes = append(changes, Change{ChangeType: ChangeTypeRemoved, Path: f, Hash: newHash})
 		}
 	}
 
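Taken together, the Scan hunks detect changes by comparing the hash recorded on the previous scan with the current one: unseen paths are reported as Added, differing hashes as Modified, and paths missing from the new listing as Removed. A condensed, self-contained sketch of that comparison using plain strings for hashes (illustrative only, not the kops implementation):

package main

import "fmt"

type ChangeType string

const (
	ChangeTypeAdded    ChangeType = "ADDED"
	ChangeTypeRemoved  ChangeType = "REMOVED"
	ChangeTypeModified ChangeType = "MODIFIED"
)

type Change struct {
	ChangeType ChangeType
	Path       string
	Hash       string
}

// diffByHash compares the previous scan's hashes with the current ones,
// mirroring the shape of VFSScan.Scan: new keys are Added, changed hashes
// are Modified, and keys that disappeared are Removed.
func diffByHash(old, current map[string]string) []Change {
	var changes []Change
	for path, newHash := range current {
		oldHash, seen := old[path]
		switch {
		case !seen:
			changes = append(changes, Change{ChangeTypeAdded, path, newHash})
		case oldHash != newHash:
			changes = append(changes, Change{ChangeTypeModified, path, newHash})
		}
	}
	for path := range old {
		if _, still := current[path]; !still {
			changes = append(changes, Change{ChangeTypeRemoved, path, ""})
		}
	}
	return changes
}

func main() {
	old := map[string]string{"a.txt": "h1", "b.txt": "h2"}
	current := map[string]string{"a.txt": "h1", "b.txt": "h3", "c.txt": "h4"}
	for _, c := range diffByHash(old, current) {
		fmt.Println(c.ChangeType, c.Path)
	}
}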
@@ -125,7 +125,7 @@ func SyncDir(src *VFSScan, destBase Path) error {
 		destFile := destBase.Join(relativePath)
 
 		switch change.ChangeType {
-		case ChangeType_Removed:
+		case ChangeTypeRemoved:
 			err := destFile.Remove()
 			if err != nil {
 				if !os.IsNotExist(err) {
@@ -134,7 +134,7 @@ func SyncDir(src *VFSScan, destBase Path) error {
 			}
 			continue
 
-		case ChangeType_Modified, ChangeType_Added:
+		case ChangeTypeModified, ChangeTypeAdded:
			hashMatch, err := hashesMatch(f, destFile)
 			if err != nil {
 				return err