Update vendor

This commit is contained in:
mikesplain 2019-09-03 11:26:27 -04:00
parent 1382b326e3
commit 8a2b8d87c0
36 changed files with 6292 additions and 2518 deletions

View File

@ -23,6 +23,7 @@ import (
"log"
"os"
"path/filepath"
"sort"
"strings"
"github.com/bazelbuild/bazel-gazelle/config"
@ -40,12 +41,13 @@ import (
// update commands. This includes everything in config.Config, but it also
// includes some additional fields that aren't relevant to other packages.
type updateConfig struct {
dirs []string
emit emitFunc
repos []repo.Repo
walkMode walk.Mode
patchPath string
patchBuffer bytes.Buffer
dirs []string
emit emitFunc
repos []repo.Repo
workspaceFiles []*rule.File
walkMode walk.Mode
patchPath string
patchBuffer bytes.Buffer
}
type emitFunc func(c *config.Config, f *rule.File) error
@ -63,8 +65,10 @@ func getUpdateConfig(c *config.Config) *updateConfig {
}
type updateConfigurer struct {
mode string
recursive bool
mode string
recursive bool
knownImports []string
repoConfigPath string
}
func (ucr *updateConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
@ -76,6 +80,8 @@ func (ucr *updateConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *conf
fs.StringVar(&ucr.mode, "mode", "fix", "print: prints all of the updated BUILD files\n\tfix: rewrites all of the BUILD files in place\n\tdiff: computes the rewrite but then just does a diff")
fs.BoolVar(&ucr.recursive, "r", true, "when true, gazelle will update subdirectories recursively")
fs.StringVar(&uc.patchPath, "patch", "", "when set with -mode=diff, gazelle will write to a file instead of stdout")
fs.Var(&gzflag.MultiFlag{Values: &ucr.knownImports}, "known_import", "import path for which external resolution is skipped (can specify multiple times)")
fs.StringVar(&ucr.repoConfigPath, "repo_config", "", "file where Gazelle should load repository configuration. Defaults to WORKSPACE.")
}
func (ucr *updateConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error {
@ -118,6 +124,73 @@ func (ucr *updateConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) erro
uc.walkMode = walk.UpdateDirsMode
}
// Load the repo configuration file (WORKSPACE by default) to find out
// names and prefixes of other go_repositories. This affects external
// dependency resolution for Go.
// TODO(jayconrod): this should be moved to language/go.
var repoFileMap map[*rule.File][]string
if ucr.repoConfigPath == "" {
ucr.repoConfigPath = filepath.Join(c.RepoRoot, "WORKSPACE")
}
repoConfigFile, err := rule.LoadWorkspaceFile(ucr.repoConfigPath, "")
if err != nil {
if !os.IsNotExist(err) {
return err
}
} else {
uc.repos, repoFileMap, err = repo.ListRepositories(repoConfigFile)
if err != nil {
return err
}
}
repoPrefixes := make(map[string]bool)
for _, r := range uc.repos {
repoPrefixes[r.GoPrefix] = true
}
for _, imp := range ucr.knownImports {
if repoPrefixes[imp] {
continue
}
repo := repo.Repo{
Name: label.ImportPathToBazelRepoName(imp),
GoPrefix: imp,
}
uc.repos = append(uc.repos, repo)
}
// If the repo configuration file is not WORKSPACE, also load WORKSPACE
// so we can apply any necessary fixes.
workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE")
var workspace *rule.File
if ucr.repoConfigPath == workspacePath {
workspace = repoConfigFile
} else {
workspace, err = rule.LoadWorkspaceFile(workspacePath, "")
if err != nil && !os.IsNotExist(err) {
return err
}
if workspace != nil {
_, repoFileMap, err = repo.ListRepositories(workspace)
if err != nil {
return err
}
}
}
if workspace != nil {
c.RepoName = findWorkspaceName(workspace)
uc.workspaceFiles = make([]*rule.File, 0, len(repoFileMap))
seen := make(map[string]bool)
for f := range repoFileMap {
if !seen[f.Path] {
uc.workspaceFiles = append(uc.workspaceFiles, f)
seen[f.Path] = true
}
}
sort.Slice(uc.workspaceFiles, func(i, j int) bool {
return uc.workspaceFiles[i].Path < uc.workspaceFiles[j].Path
})
}
return nil
}
@ -253,7 +326,7 @@ func runFixUpdate(cmd command, args []string) error {
if repl, ok := c.KindMap[r.Kind()]; ok {
mappedKindInfo[repl.KindName] = kinds[r.Kind()]
mappedKinds = append(mappedKinds, repl)
mrslv.MappedKind(f, repl)
mrslv.MappedKind(rel, repl)
r.SetKind(repl.KindName)
}
}
@ -291,11 +364,12 @@ func runFixUpdate(cmd command, args []string) error {
ruleIndex.Finish()
// Resolve dependencies.
rc := repo.NewRemoteCache(uc.repos)
rc, cleanupRc := repo.NewRemoteCache(uc.repos)
defer cleanupRc()
for _, v := range visits {
for i, r := range v.rules {
from := label.New(c.RepoName, v.pkgRel, r.Name())
mrslv.Resolver(r, v.file).Resolve(v.c, ruleIndex, rc, r, v.imports[i], from)
mrslv.Resolver(r, v.pkgRel).Resolve(v.c, ruleIndex, rc, r, v.imports[i], from)
}
merger.MergeFile(v.file, v.empty, v.rules, merger.PostResolve,
unionKindInfoMaps(kinds, v.mappedKindInfo))
@ -330,9 +404,6 @@ func newFixUpdateConfiguration(cmd command, args []string, cexts []config.Config
// -h or -help were passed explicitly.
fs.Usage = func() {}
var knownImports []string
fs.Var(&gzflag.MultiFlag{Values: &knownImports}, "known_import", "import path for which external resolution is skipped (can specify multiple times)")
for _, cext := range cexts {
cext.RegisterFlags(fs, cmd.String(), c)
}
@ -353,31 +424,8 @@ func newFixUpdateConfiguration(cmd command, args []string, cexts []config.Config
}
uc := getUpdateConfig(c)
workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE")
if workspace, err := rule.LoadWorkspaceFile(workspacePath, ""); err != nil {
if !os.IsNotExist(err) {
return nil, err
}
} else {
if err := fixWorkspace(c, workspace, loads); err != nil {
return nil, err
}
c.RepoName = findWorkspaceName(workspace)
uc.repos = repo.ListRepositories(workspace)
}
repoPrefixes := make(map[string]bool)
for _, r := range uc.repos {
repoPrefixes[r.GoPrefix] = true
}
for _, imp := range knownImports {
if repoPrefixes[imp] {
continue
}
repo := repo.Repo{
Name: label.ImportPathToBazelRepoName(imp),
GoPrefix: imp,
}
uc.repos = append(uc.repos, repo)
if err := fixRepoFiles(c, uc.workspaceFiles, loads); err != nil {
return nil, err
}
return c, nil
@ -412,7 +460,7 @@ FLAGS:
fs.PrintDefaults()
}
func fixWorkspace(c *config.Config, workspace *rule.File, loads []rule.LoadInfo) error {
func fixRepoFiles(c *config.Config, files []*rule.File, loads []rule.LoadInfo) error {
uc := getUpdateConfig(c)
if !c.ShouldFix {
return nil
@ -427,12 +475,19 @@ func fixWorkspace(c *config.Config, workspace *rule.File, loads []rule.LoadInfo)
return nil
}
merger.FixWorkspace(workspace)
merger.FixLoads(workspace, loads)
if err := merger.CheckGazelleLoaded(workspace); err != nil {
return err
for _, f := range files {
merger.FixLoads(f, loads)
if f.Path == filepath.Join(c.RepoRoot, "WORKSPACE") {
merger.FixWorkspace(f)
if err := merger.CheckGazelleLoaded(f); err != nil {
return err
}
}
if err := uc.emit(c, f); err != nil {
return err
}
}
return uc.emit(c, workspace)
return nil
}
func findWorkspaceName(f *rule.File) string {
@ -511,9 +566,9 @@ func applyKindMappings(mappedKinds []config.MappedKind, loads []rule.LoadInfo) [
// appendOrMergeKindMapping adds LoadInfo for the given replacement.
func appendOrMergeKindMapping(mappedLoads []rule.LoadInfo, mappedKind config.MappedKind) []rule.LoadInfo {
// If mappedKind.KindLoad already exists in the list, create a merged copy.
for _, load := range mappedLoads {
for i, load := range mappedLoads {
if load.Name == mappedKind.KindLoad {
load.Symbols = append(load.Symbols, mappedKind.KindName)
mappedLoads[i].Symbols = append(load.Symbols, mappedKind.KindName)
return mappedLoads
}
}

View File

@ -43,21 +43,18 @@ func (mr *metaResolver) AddBuiltin(kindName string, resolver resolve.Resolver) {
}
// MappedKind records the fact that the given mapping was applied while
// processing the given file.
func (mr *metaResolver) MappedKind(f *rule.File, kind config.MappedKind) {
mr.mappedKinds[f.Pkg] = append(mr.mappedKinds[f.Pkg], kind)
// processing the given package.
func (mr *metaResolver) MappedKind(pkgRel string, kind config.MappedKind) {
mr.mappedKinds[pkgRel] = append(mr.mappedKinds[pkgRel], kind)
}
// Resolver returns a resolver for the given rule and file, and a bool
// indicating whether one was found. If f is nil, mapped kinds are disregarded.
func (mr metaResolver) Resolver(r *rule.Rule, f *rule.File) resolve.Resolver {
// If f is provided, check the replacements used while processing that package.
// If the rule is a kind that was mapped, return the resolver for the kind it was mapped from.
if f != nil {
for _, mappedKind := range mr.mappedKinds[f.Pkg] {
if mappedKind.KindName == r.Kind() {
return mr.builtins[mappedKind.FromKind]
}
// Resolver returns a resolver for the given rule and package. Empty string
// may be passed for pkgRel, which results in consulting the builtin kinds
// only.
func (mr metaResolver) Resolver(r *rule.Rule, pkgRel string) resolve.Resolver {
for _, mappedKind := range mr.mappedKinds[pkgRel] {
if mappedKind.KindName == r.Kind() {
return mr.builtins[mappedKind.FromKind]
}
}
return mr.builtins[r.Kind()]

View File

@ -21,6 +21,7 @@ import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"github.com/bazelbuild/bazel-gazelle/config"
@ -30,18 +31,21 @@ import (
"github.com/bazelbuild/bazel-gazelle/rule"
)
type updateReposFn func(c *updateReposConfig, oldFile *rule.File, kinds map[string]rule.KindInfo) error
type updateReposFn func(c *updateReposConfig, workspace *rule.File, oldFile *rule.File, kinds map[string]rule.KindInfo) ([]*rule.File, error)
type updateReposConfig struct {
fn updateReposFn
lockFilename string
importPaths []string
macroFileName string
macroDefName string
buildExternalAttr string
buildFileNamesAttr string
buildFileGenerationAttr string
buildTagsAttr string
buildFileProtoModeAttr string
buildExtraArgsAttr string
pruneRules bool
}
var validBuildExternalAttr = []string{"external", "vendored"}
@ -56,16 +60,40 @@ func getUpdateReposConfig(c *config.Config) *updateReposConfig {
type updateReposConfigurer struct{}
type macroFlag struct {
macroFileName *string
macroDefName *string
}
func (f macroFlag) Set(value string) error {
args := strings.Split(value, "%")
if len(args) != 2 {
return fmt.Errorf("Failure parsing to_macro: %s, expected format is macroFile%%defName", value)
}
if strings.HasPrefix(args[0], "..") {
return fmt.Errorf("Failure parsing to_macro: %s, macro file path %s should not start with \"..\"", value, args[0])
}
*f.macroFileName = args[0]
*f.macroDefName = args[1]
return nil
}
func (f macroFlag) String() string {
return ""
}
func (_ *updateReposConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
uc := &updateReposConfig{}
c.Exts[updateReposName] = uc
fs.StringVar(&uc.lockFilename, "from_file", "", "Gazelle will translate repositories listed in this file into repository rules in WORKSPACE. Currently only dep's Gopkg.lock is supported.")
fs.StringVar(&uc.lockFilename, "from_file", "", "Gazelle will translate repositories listed in this file into repository rules in WORKSPACE or a .bzl macro function. Gopkg.lock and go.mod files are supported.")
fs.StringVar(&uc.buildFileNamesAttr, "build_file_names", "", "Sets the build_file_name attribute for the generated go_repository rule(s).")
fs.Var(&gzflag.AllowedStringFlag{Value: &uc.buildExternalAttr, Allowed: validBuildExternalAttr}, "build_external", "Sets the build_external attribute for the generated go_repository rule(s).")
fs.Var(&gzflag.AllowedStringFlag{Value: &uc.buildFileGenerationAttr, Allowed: validBuildFileGenerationAttr}, "build_file_generation", "Sets the build_file_generation attribute for the generated go_repository rule(s).")
fs.StringVar(&uc.buildTagsAttr, "build_tags", "", "Sets the build_tags attribute for the generated go_repository rule(s).")
fs.Var(&gzflag.AllowedStringFlag{Value: &uc.buildFileProtoModeAttr, Allowed: validBuildFileProtoModeAttr}, "build_file_proto_mode", "Sets the build_file_proto_mode attribute for the generated go_repository rule(s).")
fs.StringVar(&uc.buildExtraArgsAttr, "build_extra_args", "", "Sets the build_extra_args attribute for the generated go_repository rule(s).")
fs.Var(macroFlag{macroFileName: &uc.macroFileName, macroDefName: &uc.macroDefName}, "to_macro", "Tells Gazelle to write repository rules into a .bzl macro function rather than the WORKSPACE file. The expected format is: macroFile%defName")
fs.BoolVar(&uc.pruneRules, "prune", false, "When enabled, Gazelle will remove rules that no longer have equivalent repos in the Gopkg.lock/go.mod file. Can only be used with -from_file.")
}
func (_ *updateReposConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error {
@ -81,6 +109,9 @@ func (_ *updateReposConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) e
if len(fs.Args()) == 0 {
return fmt.Errorf("No repositories specified\nTry -help for more information.")
}
if uc.pruneRules {
return fmt.Errorf("The -prune option can only be used with -from_file.")
}
uc.fn = updateImportPaths
uc.importPaths = fs.Args()
}
@ -108,22 +139,42 @@ func updateRepos(args []string) error {
}
uc := getUpdateReposConfig(c)
workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE")
f, err := rule.LoadWorkspaceFile(workspacePath, "")
path := filepath.Join(c.RepoRoot, "WORKSPACE")
workspace, err := rule.LoadWorkspaceFile(path, "")
if err != nil {
return fmt.Errorf("error loading %q: %v", workspacePath, err)
return fmt.Errorf("error loading %q: %v", path, err)
}
var destFile *rule.File
if uc.macroFileName == "" {
destFile = workspace
} else {
macroPath := filepath.Join(c.RepoRoot, filepath.Clean(uc.macroFileName))
if _, err = os.Stat(macroPath); os.IsNotExist(err) {
destFile, err = rule.EmptyMacroFile(macroPath, "", uc.macroDefName)
} else {
destFile, err = rule.LoadMacroFile(macroPath, "", uc.macroDefName)
}
if err != nil {
return fmt.Errorf("error loading %q: %v", macroPath, err)
}
}
merger.FixWorkspace(f)
if err := uc.fn(uc, f, kinds); err != nil {
merger.FixWorkspace(workspace)
files, err := uc.fn(uc, workspace, destFile, kinds)
if err != nil {
return err
}
merger.FixLoads(f, loads)
if err := merger.CheckGazelleLoaded(f); err != nil {
return err
}
if err := f.Save(f.Path); err != nil {
return fmt.Errorf("error writing %q: %v", f.Path, err)
for _, f := range files {
merger.FixLoads(f, loads)
if f.Path == workspace.Path {
if err := merger.CheckGazelleLoaded(workspace); err != nil {
return err
}
}
if err := f.Save(f.Path); err != nil {
return err
}
}
return nil
}
@ -173,9 +224,13 @@ FLAGS:
fs.PrintDefaults()
}
func updateImportPaths(c *updateReposConfig, f *rule.File, kinds map[string]rule.KindInfo) error {
rs := repo.ListRepositories(f)
rc := repo.NewRemoteCache(rs)
func updateImportPaths(c *updateReposConfig, workspace *rule.File, destFile *rule.File, kinds map[string]rule.KindInfo) ([]*rule.File, error) {
repos, reposByFile, err := repo.ListRepositories(workspace)
if err != nil {
return nil, err
}
rc, cleanupRc := repo.NewRemoteCache(repos)
defer cleanupRc()
genRules := make([]*rule.Rule, len(c.importPaths))
errs := make([]error, len(c.importPaths))
@ -200,26 +255,29 @@ func updateImportPaths(c *updateReposConfig, f *rule.File, kinds map[string]rule
for _, err := range errs {
if err != nil {
return err
return nil, err
}
}
merger.MergeFile(f, nil, genRules, merger.PreResolve, kinds)
return nil
files := repo.MergeRules(genRules, reposByFile, destFile, kinds, false)
return files, nil
}
func importFromLockFile(c *updateReposConfig, f *rule.File, kinds map[string]rule.KindInfo) error {
rs := repo.ListRepositories(f)
rc := repo.NewRemoteCache(rs)
func importFromLockFile(c *updateReposConfig, workspace *rule.File, destFile *rule.File, kinds map[string]rule.KindInfo) ([]*rule.File, error) {
repos, reposByFile, err := repo.ListRepositories(workspace)
if err != nil {
return nil, err
}
rc, cleanupRc := repo.NewRemoteCache(repos)
defer cleanupRc()
genRules, err := repo.ImportRepoRules(c.lockFilename, rc)
if err != nil {
return err
return nil, err
}
for i := range genRules {
applyBuildAttributes(c, genRules[i])
}
merger.MergeFile(f, nil, genRules, merger.PreResolve, kinds)
return nil
files := repo.MergeRules(genRules, reposByFile, destFile, kinds, c.pruneRules)
return files, nil
}
func applyBuildAttributes(c *updateReposConfig, r *rule.Rule) {
@ -239,6 +297,7 @@ func applyBuildAttributes(c *updateReposConfig, r *rule.Rule) {
r.SetAttr("build_file_proto_mode", c.buildFileProtoModeAttr)
}
if c.buildExtraArgsAttr != "" {
r.SetAttr("build_extra_args", c.buildExtraArgsAttr)
extraArgs := strings.Split(c.buildExtraArgsAttr, ",")
r.SetAttr("build_extra_args", extraArgs)
}
}

View File

@ -26,7 +26,7 @@ import (
"github.com/bazelbuild/bazel-gazelle/repo"
)
var minimumRulesGoVersion = version.Version{0, 13, 0}
var minimumRulesGoVersion = version.Version{0, 19, 0}
// checkRulesGoVersion checks whether a compatible version of rules_go is
// being used in the workspace. A message will be logged if an incompatible

View File

@ -20,7 +20,9 @@ import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/bazelbuild/bazel-gazelle/config"
@ -71,6 +73,16 @@ type goConfig struct {
// goGrpcCompilersSet indicates whether goGrpcCompiler was set explicitly.
goGrpcCompilersSet bool
// goRepositoryMode is true if Gazelle was invoked by a go_repository rule.
// In this mode, we won't go out to the network to resolve external deps.
goRepositoryMode bool
// moduleMode is true if the current directory is intended to be built
// as part of a module. Minimal module compatibility won't be supported
// if this is true in the root directory. External dependencies may be
// resolved differently (also depending on goRepositoryMode).
moduleMode bool
}
var (
@ -222,6 +234,16 @@ func (_ *goLang) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
&gzflag.MultiFlag{Values: &gc.goGrpcCompilers, IsSet: &gc.goGrpcCompilersSet},
"go_grpc_compiler",
"go_proto_library compiler to use for gRPC (may be repeated)")
fs.BoolVar(
&gc.goRepositoryMode,
"go_repository_mode",
false,
"set when gazelle is invoked by go_repository")
fs.BoolVar(
&gc.moduleMode,
"go_repository_module_mode",
false,
"set when gazelle is invoked by go_repository in module mode")
}
c.Exts[goName] = gc
}
@ -247,6 +269,13 @@ func (_ *goLang) Configure(c *config.Config, rel string, f *rule.File) {
}
c.Exts[goName] = gc
if !gc.moduleMode {
st, err := os.Stat(filepath.Join(c.RepoRoot, filepath.FromSlash(rel), "go.mod"))
if err == nil && !st.IsDir() {
gc.moduleMode = true
}
}
if path.Base(rel) == "vendor" {
gc.importMapPrefix = inferImportPath(gc, rel)
gc.importMapPrefixRel = rel

View File

@ -37,10 +37,26 @@ func (gl *goLang) GenerateRules(args language.GenerateArgs) language.GenerateRes
c := args.Config
gc := getGoConfig(c)
pcMode := getProtoMode(c)
// This is a collection of proto_library rule names that have a corresponding
// go_proto_library rule already generated.
goProtoRules := make(map[string]struct{})
var protoRuleNames []string
protoPackages := make(map[string]proto.Package)
protoFileInfo := make(map[string]proto.FileInfo)
for _, r := range args.OtherGen {
if r.Kind() == "go_proto_library" {
if proto := r.AttrString("proto"); proto != "" {
goProtoRules[proto] = struct{}{}
}
if protos := r.AttrStrings("protos"); protos != nil {
for _, proto := range protos {
goProtoRules[proto] = struct{}{}
}
}
}
if r.Kind() != "proto_library" {
continue
}
@ -107,6 +123,13 @@ func (gl *goLang) GenerateRules(args language.GenerateArgs) language.GenerateRes
if _, ok := err.(*build.NoGoError); ok {
if len(protoPackages) == 1 {
for name, ppkg := range protoPackages {
if _, ok := goProtoRules[":"+name]; ok {
// if a go_proto_library rule already exists for this
// proto package, treat it as if the proto package
// doesn't exist.
pkg = emptyPackage(c, args.Dir, args.Rel)
break
}
pkg = &goPackage{
name: goProtoPackageName(ppkg),
importPath: goProtoImportPath(gc, ppkg, args.Rel),
@ -151,6 +174,13 @@ func (gl *goLang) GenerateRules(args language.GenerateArgs) language.GenerateRes
var rules []*rule.Rule
var protoEmbed string
for _, name := range protoRuleNames {
if _, ok := goProtoRules[":"+name]; ok {
// if a go_proto_library rule exists for this proto_library rule
// already, skip creating another go_proto_library for it, assuming
// that a different gazelle extension is responsible for
// go_proto_library rule generation.
continue
}
ppkg := protoPackages[name]
var rs []*rule.Rule
if name == protoName {
@ -465,11 +495,22 @@ func (g *generator) setCommonAttrs(r *rule.Rule, pkgRel, visibility string, targ
}
func (g *generator) setImportAttrs(r *rule.Rule, importPath string) {
gc := getGoConfig(g.c)
r.SetAttr("importpath", importPath)
goConf := getGoConfig(g.c)
if goConf.importMapPrefix != "" {
fromPrefixRel := pathtools.TrimPrefix(g.rel, goConf.importMapPrefixRel)
importMap := path.Join(goConf.importMapPrefix, fromPrefixRel)
// Set importpath_aliases if we need minimal module compatibility.
// If a package is part of a module with a v2+ semantic import version
// suffix, packages that are not part of modules may import it without
// the suffix.
if gc.goRepositoryMode && gc.moduleMode && pathtools.HasPrefix(importPath, gc.prefix) && gc.prefixRel == "" {
if mmcImportPath := pathWithoutSemver(importPath); mmcImportPath != "" {
r.SetAttr("importpath_aliases", []string{mmcImportPath})
}
}
if gc.importMapPrefix != "" {
fromPrefixRel := pathtools.TrimPrefix(g.rel, gc.importMapPrefixRel)
importMap := path.Join(gc.importMapPrefix, fromPrefixRel)
if importMap != importPath {
r.SetAttr("importmap", importMap)
}

View File

@ -83,18 +83,23 @@ var goKinds = map[string]rule.KindInfo{
ResolveAttrs: map[string]bool{"deps": true},
},
"go_repository": {
MatchAttrs: []string{"importpath"},
NonEmptyAttrs: nil, // never empty
MatchAttrs: []string{"importpath"},
NonEmptyAttrs: map[string]bool{
"importpath": true,
},
MergeableAttrs: map[string]bool{
"commit": true,
"importpath": true,
"remote": true,
"replace": true,
"sha256": true,
"strip_prefix": true,
"sum": true,
"tag": true,
"type": true,
"urls": true,
"vcs": true,
"version": true,
},
},
"go_test": {

View File

@ -19,11 +19,16 @@ var knownGoProtoImports = map[string]label.Label{
"github.com/golang/protobuf/ptypes/timestamp": label.New("io_bazel_rules_go", "proto/wkt", "timestamp_go_proto"),
"google.golang.org/genproto/protobuf/ptype": label.New("io_bazel_rules_go", "proto/wkt", "type_go_proto"),
"github.com/golang/protobuf/ptypes/wrappers": label.New("io_bazel_rules_go", "proto/wkt", "wrappers_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v0/common": label.New("go_googleapis", "google/ads/googleads/v0/common", "common_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v0/enums": label.New("go_googleapis", "google/ads/googleads/v0/enums", "enums_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v0/errors": label.New("go_googleapis", "google/ads/googleads/v0/errors", "errors_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v0/resources": label.New("go_googleapis", "google/ads/googleads/v0/resources", "resources_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v0/services": label.New("go_googleapis", "google/ads/googleads/v0/services", "services_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v1/common": label.New("go_googleapis", "google/ads/googleads/v1/common", "common_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v1/enums": label.New("go_googleapis", "google/ads/googleads/v1/enums", "enums_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v1/errors": label.New("go_googleapis", "google/ads/googleads/v1/errors", "errors_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v1/resources": label.New("go_googleapis", "google/ads/googleads/v1/resources", "resources_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v1/services": label.New("go_googleapis", "google/ads/googleads/v1/services", "services_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v2/common": label.New("go_googleapis", "google/ads/googleads/v2/common", "common_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v2/enums": label.New("go_googleapis", "google/ads/googleads/v2/enums", "enums_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v2/errors": label.New("go_googleapis", "google/ads/googleads/v2/errors", "errors_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v2/resources": label.New("go_googleapis", "google/ads/googleads/v2/resources", "resources_go_proto"),
"google.golang.org/genproto/googleapis/ads/googleads/v2/services": label.New("go_googleapis", "google/ads/googleads/v2/services", "services_go_proto"),
"google.golang.org/genproto/googleapis/api/annotations": label.New("go_googleapis", "google/api", "annotations_go_proto"),
"google.golang.org/genproto/googleapis/api/serviceconfig": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
"google.golang.org/genproto/googleapis/api/configchange": label.New("go_googleapis", "google/api", "configchange_go_proto"),
@ -48,19 +53,25 @@ var knownGoProtoImports = map[string]label.Label{
"google.golang.org/genproto/googleapis/bigtable/v1": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"),
"google.golang.org/genproto/googleapis/bigtable/v2": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"),
"google.golang.org/genproto/googleapis/bytestream": label.New("go_googleapis", "google/bytestream", "bytestream_go_proto"),
"google.golang.org/genproto/googleapis/cloud/asset/v1": label.New("go_googleapis", "google/cloud/asset/v1", "asset_go_proto"),
"google.golang.org/genproto/googleapis/cloud/asset/v1beta1": label.New("go_googleapis", "google/cloud/asset/v1beta1", "asset_go_proto"),
"google.golang.org/genproto/googleapis/cloud/audit": label.New("go_googleapis", "google/cloud/audit", "audit_go_proto"),
"google.golang.org/genproto/googleapis/cloud/automl/v1beta1": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"),
"google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"),
"google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_go_proto"),
"google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_go_proto"),
"google.golang.org/genproto/googleapis/cloud/bigquery/v2": label.New("go_googleapis", "google/cloud/bigquery/v2", "bigquery_go_proto"),
"google.golang.org/genproto/googleapis/cloud/billing/v1": label.New("go_googleapis", "google/cloud/billing/v1", "billing_go_proto"),
"google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1": label.New("go_googleapis", "google/cloud/binaryauthorization/v1beta1", "binaryauthorization_go_proto"),
"google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1": label.New("go_googleapis", "google/cloud/datacatalog/v1beta1", "datacatalog_go_proto"),
"google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1": label.New("go_googleapis", "google/cloud/datalabeling/v1beta1", "datalabeling_go_proto"),
"google.golang.org/genproto/googleapis/cloud/dataproc/v1": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"),
"google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
"google.golang.org/genproto/googleapis/cloud/dialogflow/v2": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
"google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
"google.golang.org/genproto/googleapis/cloud/functions/v1beta2": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"),
"google.golang.org/genproto/googleapis/cloud/iot/v1": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"),
"google.golang.org/genproto/googleapis/cloud/irm/v1alpha2": label.New("go_googleapis", "google/cloud/irm/v1alpha2", "irm_go_proto"),
"google.golang.org/genproto/googleapis/cloud/kms/v1": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"),
"google.golang.org/genproto/googleapis/cloud/language/v1": label.New("go_googleapis", "google/cloud/language/v1", "language_go_proto"),
"google.golang.org/genproto/googleapis/cloud/language/v1beta1": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_go_proto"),
@ -71,30 +82,42 @@ var knownGoProtoImports = map[string]label.Label{
"google.golang.org/genproto/googleapis/cloud/oslogin/v1": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_go_proto"),
"google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_go_proto"),
"google.golang.org/genproto/googleapis/cloud/oslogin/v1beta": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_go_proto"),
"google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1": label.New("go_googleapis", "google/cloud/phishingprotection/v1beta1", "phishingprotection_go_proto"),
"google.golang.org/genproto/googleapis/cloud/recaptchaenterprise/v1beta1": label.New("go_googleapis", "google/cloud/recaptchaenterprise/v1beta1", "recaptchaenterprise_go_proto"),
"google.golang.org/genproto/googleapis/cloud/recommender/v1beta1": label.New("go_googleapis", "google/cloud/recommender/v1beta1", "recommender_go_proto"),
"google.golang.org/genproto/googleapis/cloud/redis/v1": label.New("go_googleapis", "google/cloud/redis/v1", "redis_go_proto"),
"google.golang.org/genproto/googleapis/cloud/redis/v1beta1": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_go_proto"),
"google.golang.org/genproto/googleapis/cloud/resourcemanager/v2": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_go_proto"),
"google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"),
"google.golang.org/genproto/googleapis/cloud/scheduler/v1": label.New("go_googleapis", "google/cloud/scheduler/v1", "scheduler_go_proto"),
"google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1": label.New("go_googleapis", "google/cloud/scheduler/v1beta1", "scheduler_go_proto"),
"google.golang.org/genproto/googleapis/cloud/securitycenter/v1": label.New("go_googleapis", "google/cloud/securitycenter/v1", "securitycenter_go_proto"),
"google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1": label.New("go_googleapis", "google/cloud/securitycenter/v1beta1", "securitycenter_go_proto"),
"google.golang.org/genproto/googleapis/cloud/speech/v1": label.New("go_googleapis", "google/cloud/speech/v1", "speech_go_proto"),
"google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_go_proto"),
"google.golang.org/genproto/googleapis/cloud/support/common": label.New("go_googleapis", "google/cloud/support", "common_go_proto"),
"google.golang.org/genproto/googleapis/cloud/support/v1alpha1": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_go_proto"),
"google.golang.org/genproto/googleapis/cloud/talent/v4beta1": label.New("go_googleapis", "google/cloud/talent/v4beta1", "talent_go_proto"),
"google.golang.org/genproto/googleapis/cloud/tasks/v2": label.New("go_googleapis", "google/cloud/tasks/v2", "tasks_go_proto"),
"google.golang.org/genproto/googleapis/cloud/tasks/v2beta2": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"),
"google.golang.org/genproto/googleapis/cloud/tasks/v2beta3": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_go_proto"),
"google.golang.org/genproto/googleapis/cloud/texttospeech/v1": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_go_proto"),
"google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_go_proto"),
"google.golang.org/genproto/googleapis/cloud/translate/v3beta1": label.New("go_googleapis", "google/cloud/translate/v3beta1", "translate_go_proto"),
"google.golang.org/genproto/googleapis/cloud/videointelligence/v1": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_go_proto"),
"google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_go_proto"),
"google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_go_proto"),
"google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_go_proto"),
"google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1p2beta1", "videointelligence_go_proto"),
"google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1p3beta1", "videointelligence_go_proto"),
"google.golang.org/genproto/googleapis/cloud/vision/v1": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"),
"google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"),
"google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"),
"google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"),
"google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1": label.New("go_googleapis", "google/cloud/vision/v1p4beta1", "vision_go_proto"),
"google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1": label.New("go_googleapis", "google/cloud/webrisk/v1beta1", "webrisk_go_proto"),
"google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
"google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta": label.New("go_googleapis", "google/cloud/websecurityscanner/v1beta", "websecurityscanner_go_proto"),
"google.golang.org/genproto/googleapis/container/v1": label.New("go_googleapis", "google/container/v1", "container_go_proto"),
"google.golang.org/genproto/googleapis/container/v1alpha1": label.New("go_googleapis", "google/container/v1alpha1", "container_go_proto"),
"google.golang.org/genproto/googleapis/container/v1beta1": label.New("go_googleapis", "google/container/v1beta1", "container_go_proto"),
@ -109,6 +132,7 @@ var knownGoProtoImports = map[string]label.Label{
"google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_go_proto"),
"google.golang.org/genproto/googleapis/devtools/cloudtrace/v1": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_go_proto"),
"google.golang.org/genproto/googleapis/devtools/cloudtrace/v2": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"),
"google.golang.org/genproto/googleapis/devtools/containeranalysis/v1": label.New("go_googleapis", "google/devtools/containeranalysis/v1", "containeranalysis_go_proto"),
"google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
"google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/attestation", "attestation_go_proto"),
"google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/build", "build_go_proto"),
@ -128,6 +152,7 @@ var knownGoProtoImports = map[string]label.Label{
"google.golang.org/genproto/googleapis/devtools/source/v1": label.New("go_googleapis", "google/devtools/source/v1", "source_go_proto"),
"google.golang.org/genproto/googleapis/devtools/sourcerepo/v1": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_go_proto"),
"google.golang.org/genproto/googleapis/example/library/v1": label.New("go_googleapis", "google/example/library/v1", "library_go_proto"),
"google.golang.org/genproto/googleapis/firebase/fcm/connection/v1alpha1": label.New("go_googleapis", "google/firebase/fcm/connection/v1alpha1", "connection_go_proto"),
"google.golang.org/genproto/googleapis/firestore/admin/v1": label.New("go_googleapis", "google/firestore/admin/v1", "admin_go_proto"),
"google.golang.org/genproto/googleapis/firestore/admin/v1beta1": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"),
"google.golang.org/genproto/googleapis/firestore/admin/v1beta2": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_go_proto"),
@ -155,12 +180,17 @@ var knownGoProtoImports = map[string]label.Label{
"google.golang.org/genproto/googleapis/spanner/v1": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
"google.golang.org/genproto/googleapis/storagetransfer/v1": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"),
"google.golang.org/genproto/googleapis/streetview/publish/v1": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"),
"google.golang.org/genproto/googleapis/type/calendarperiod": label.New("go_googleapis", "google/type", "calendarperiod_go_proto"),
"google.golang.org/genproto/googleapis/type/color": label.New("go_googleapis", "google/type", "color_go_proto"),
"google.golang.org/genproto/googleapis/type/date": label.New("go_googleapis", "google/type", "date_go_proto"),
"google.golang.org/genproto/googleapis/type/dayofweek": label.New("go_googleapis", "google/type", "dayofweek_go_proto"),
"google.golang.org/genproto/googleapis/type/expr": label.New("go_googleapis", "google/type", "expr_go_proto"),
"google.golang.org/genproto/googleapis/type/fraction": label.New("go_googleapis", "google/type", "fraction_go_proto"),
"google.golang.org/genproto/googleapis/type/latlng": label.New("go_googleapis", "google/type", "latlng_go_proto"),
"google.golang.org/genproto/googleapis/type/money": label.New("go_googleapis", "google/type", "money_go_proto"),
"google.golang.org/genproto/googleapis/type/postaladdress": label.New("go_googleapis", "google/type", "postaladdress_go_proto"),
"google.golang.org/genproto/googleapis/type/quaternion": label.New("go_googleapis", "google/type", "quaternion_go_proto"),
"google.golang.org/genproto/googleapis/type/timeofday": label.New("go_googleapis", "google/type", "timeofday_go_proto"),
"google.golang.org/genproto/googleapis/watcher/v1": label.New("go_googleapis", "google/watcher/v1", "watcher_go_proto"),
"google.golang.org/genproto/googleapis/grafeas/v1": label.New("go_googleapis", "grafeas/v1", "grafeas_go_proto"),
}

File diff suppressed because it is too large Load Diff

View File

@ -19,6 +19,7 @@ import (
"fmt"
"log"
"path"
"regexp"
"sort"
"strings"
@ -486,3 +487,21 @@ func (si *platformStringInfo) convertToPlatforms() {
si.archs = nil
}
}
// semverRex matches an import path carrying a major-version suffix such as
// "/v2", capturing the suffix itself (possibly followed by more path).
var semverRex = regexp.MustCompile(`^.*?(/v\d+)(?:/.*)?$`)

// pathWithoutSemver strips a semantic-import-version suffix from path.
// For example, "example.com/foo/v2/bar" becomes "example.com/foo/bar".
// If path has no such suffix (suffixes starting with "0", and exactly "1",
// do not count), the empty string is returned.
func pathWithoutSemver(path string) string {
	idx := semverRex.FindStringSubmatchIndex(path)
	if idx == nil {
		return ""
	}
	start, end := idx[2], idx[3]
	// digits is the numeric part after "/v".
	digits := path[start+2 : end]
	if digits[0] == '0' || digits == "1" {
		return ""
	}
	return path[:start] + path[end:]
}

View File

@ -21,6 +21,7 @@ import (
"go/build"
"log"
"path"
"regexp"
"strings"
"github.com/bazelbuild/bazel-gazelle/config"
@ -178,7 +179,7 @@ func resolveGo(c *config.Config, ix *resolve.RuleIndex, rc *repo.RemoteCache, r
}
if gc.depMode == externalMode {
return resolveExternal(rc, imp)
return resolveExternal(gc.moduleMode, rc, imp)
} else {
return resolveVendored(rc, imp)
}
@ -227,7 +228,8 @@ func resolveWithIndexGo(ix *resolve.RuleIndex, imp string, from label.Label) (la
// Current match is worse
} else {
// Match is ambiguous
matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.Label, m.Label, imp, from)
// TODO: consider listing all the ambiguous rules here.
matchError = fmt.Errorf("rule %s imports %q which matches multiple rules: %s and %s. # gazelle:resolve may be used to disambiguate", from, imp, bestMatch.Label, m.Label)
}
}
if matchError != nil {
@ -242,15 +244,39 @@ func resolveWithIndexGo(ix *resolve.RuleIndex, imp string, from label.Label) (la
return bestMatch.Label, nil
}
func resolveExternal(rc *repo.RemoteCache, imp string) (label.Label, error) {
prefix, repo, err := rc.Root(imp)
var modMajorRex = regexp.MustCompile(`/v\d+(?:/|$)`)
func resolveExternal(moduleMode bool, rc *repo.RemoteCache, imp string) (label.Label, error) {
// If we're in module mode, use "go list" to find the module path and
// repository name. Otherwise, use special cases (for github.com, golang.org)
// or send a GET with ?go-get=1 to find the root. If the path contains
// a major version suffix (e.g., /v2), treat it as a module anyway though.
//
// Eventually module mode will be the only mode. But for now, it's expensive
// and not the common case, especially when known repositories aren't
// listed in WORKSPACE (which is currently the case within go_repository).
if !moduleMode {
moduleMode = pathWithoutSemver(imp) != ""
}
var prefix, repo string
var err error
if moduleMode {
prefix, repo, err = rc.Mod(imp)
} else {
prefix, repo, err = rc.Root(imp)
}
if err != nil {
return label.NoLabel, err
}
var pkg string
if imp != prefix {
if pathtools.HasPrefix(imp, prefix) {
pkg = pathtools.TrimPrefix(imp, prefix)
} else if impWithoutSemver := pathWithoutSemver(imp); pathtools.HasPrefix(impWithoutSemver, prefix) {
// We may have used minimal module compatibility to resolve a path
// without a semantic import version suffix to a repository that has one.
pkg = pathtools.TrimPrefix(impWithoutSemver, prefix)
}
return label.New(repo, pkg, defaultLibName), nil
@ -306,7 +332,7 @@ func resolveProto(c *config.Config, ix *resolve.RuleIndex, rc *repo.RemoteCache,
var wellKnownProtos = map[string]bool{
"google/protobuf/any.proto": true,
"google/protobuf/api.proto": true,
"google/protobuf/compiler_plugin.proto": true,
"google/protobuf/compiler/plugin.proto": true,
"google/protobuf/descriptor.proto": true,
"google/protobuf/duration.proto": true,
"google/protobuf/empty.proto": true,

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -106,7 +106,7 @@ func MergeFile(oldFile *rule.File, emptyRules, genRules []*rule.Rule, phase Phas
// Merge empty rules into the file and delete any rules which become empty.
for _, emptyRule := range emptyRules {
if oldRule, _ := match(oldFile.Rules, emptyRule, kinds[emptyRule.Kind()]); oldRule != nil {
if oldRule, _ := Match(oldFile.Rules, emptyRule, kinds[emptyRule.Kind()]); oldRule != nil {
if oldRule.ShouldKeep() {
continue
}
@ -124,7 +124,7 @@ func MergeFile(oldFile *rule.File, emptyRules, genRules []*rule.Rule, phase Phas
matchErrors := make([]error, len(genRules))
substitutions := make(map[string]string)
for i, genRule := range genRules {
oldRule, err := match(oldFile.Rules, genRule, kinds[genRule.Kind()])
oldRule, err := Match(oldFile.Rules, genRule, kinds[genRule.Kind()])
if err != nil {
// TODO(jayconrod): add a verbose mode and log errors. They are too chatty
// to print by default.
@ -179,7 +179,7 @@ func substituteRule(r *rule.Rule, substitutions map[string]string, info rule.Kin
}
}
// match searches for a rule that can be merged with x in rules.
// Match searches for a rule that can be merged with x in rules.
//
// A rule is considered a match if its kind is equal to x's kind AND either its
// name is equal OR at least one of the attributes in matchAttrs is equal.
@ -195,7 +195,7 @@ func substituteRule(r *rule.Rule, substitutions map[string]string, info rule.Kin
// the quality of the match (name match is best, then attribute match in the
// order that attributes are listed). If disambiguation is successful,
// the rule and nil are returned. Otherwise, nil and an error are returned.
func match(rules []*rule.Rule, x *rule.Rule, info rule.KindInfo) (*rule.Rule, error) {
func Match(rules []*rule.Rule, x *rule.Rule, info rule.KindInfo) (*rule.Rule, error) {
xname := x.Name()
xkind := x.Kind()
var nameMatches []*rule.Rule

View File

@ -14,6 +14,7 @@ go_library(
visibility = ["//visibility:public"],
deps = [
"//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/merger:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/pathtools:go_default_library",
"//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library",
"//vendor/github.com/pelletier/go-toml:go_default_library",

View File

@ -18,97 +18,146 @@ package repo
import (
"bytes"
"encoding/json"
"go/build"
"io"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"sort"
"strings"
"github.com/bazelbuild/bazel-gazelle/label"
)
type module struct {
Path, Version string
Main bool
}
// Per the `go help modules` documentation:
// There are three pseudo-version forms:
//
// vX.0.0-yyyymmddhhmmss-abcdefabcdef is used when there is no earlier
// versioned commit with an appropriate major version before the target commit.
// (This was originally the only form, so some older go.mod files use this form
// even for commits that do follow tags.)
//
// vX.Y.Z-pre.0.yyyymmddhhmmss-abcdefabcdef is used when the most
// recent versioned commit before the target commit is vX.Y.Z-pre.
//
// vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdefabcdef is used when the most
// recent versioned commit before the target commit is vX.Y.Z.
//
// We need to match all three of these with the following regexp.
var regexMixedVersioning = regexp.MustCompile(`^(.*?)[-.]((?:0\.|)[0-9]{14})-([a-fA-F0-9]{12})$`)
// toRepoRule converts a module (as reported by "go list -m -json") into a
// Repo. Pseudo-versions are mapped to their commit hash; any other version
// becomes a tag, with a trailing "+incompatible" suffix dropped.
func toRepoRule(mod module) Repo {
	r := Repo{
		Name:     label.ImportPathToBazelRepoName(mod.Path),
		GoPrefix: mod.Path,
	}
	if gr := regexMixedVersioning.FindStringSubmatch(mod.Version); gr != nil {
		r.Commit = gr[3]
	} else {
		r.Tag = strings.TrimSuffix(mod.Version, "+incompatible")
	}
	return r
}
func importRepoRulesModules(filename string, _ *RemoteCache) (repos []Repo, err error) {
// Copy go.mod to temporary directory. We may run commands that modify it,
// and we want to leave the original alone.
tempDir, err := copyGoModToTemp(filename)
if err != nil {
return nil, err
}
defer os.RemoveAll(tempDir)
data, err := goListModulesFn(tempDir)
// List all modules except for the main module, including implicit indirect
// dependencies.
type module struct {
Path, Version, Sum string
Main bool
Replace *struct {
Path, Version string
}
}
// path@version can be used as a unique identifier for looking up sums
pathToModule := map[string]*module{}
data, err := goListModules(tempDir)
if err != nil {
return nil, err
}
dec := json.NewDecoder(bytes.NewReader(data))
for dec.More() {
var mod module
if err := dec.Decode(&mod); err != nil {
mod := new(module)
if err := dec.Decode(mod); err != nil {
return nil, err
}
if mod.Main {
continue
}
repos = append(repos, toRepoRule(mod))
if mod.Replace != nil {
if filepath.IsAbs(mod.Replace.Path) || build.IsLocalImport(mod.Replace.Path) {
log.Printf("go_repository does not support file path replacements for %s -> %s", mod.Path,
mod.Replace.Path)
continue
}
pathToModule[mod.Replace.Path + "@" + mod.Replace.Version] = mod
} else {
pathToModule[mod.Path + "@" + mod.Version] = mod
}
}
// Load sums from go.sum. Ideally, they're all there.
goSumPath := filepath.Join(filepath.Dir(filename), "go.sum")
data, _ = ioutil.ReadFile(goSumPath)
lines := bytes.Split(data, []byte("\n"))
for _, line := range lines {
line = bytes.TrimSpace(line)
fields := bytes.Fields(line)
if len(fields) != 3 {
continue
}
path, version, sum := string(fields[0]), string(fields[1]), string(fields[2])
if strings.HasSuffix(version, "/go.mod") {
continue
}
if mod, ok := pathToModule[path + "@" + version]; ok {
mod.Sum = sum
}
}
// If sums are missing, run go mod download to get them.
var missingSumArgs []string
for pathVer, mod := range pathToModule {
if mod.Sum == "" {
missingSumArgs = append(missingSumArgs, pathVer)
}
}
if len(missingSumArgs) > 0 {
data, err := goModDownload(tempDir, missingSumArgs)
if err != nil {
return nil, err
}
dec = json.NewDecoder(bytes.NewReader(data))
for dec.More() {
var dl module
if err := dec.Decode(&dl); err != nil {
return nil, err
}
if mod, ok := pathToModule[dl.Path + "@" + dl.Version]; ok {
mod.Sum = dl.Sum
}
}
}
// Translate to repo metadata.
repos = make([]Repo, 0, len(pathToModule))
for pathVer, mod := range pathToModule {
if mod.Sum == "" {
log.Printf("could not determine sum for module %s", pathVer)
continue
}
repo := Repo{
Name: label.ImportPathToBazelRepoName(mod.Path),
GoPrefix: mod.Path,
Version: mod.Version,
Sum: mod.Sum,
}
if mod.Replace != nil {
repo.Replace = mod.Replace.Path
repo.Version = mod.Replace.Version
}
repos = append(repos, repo)
}
sort.Slice(repos, func(i, j int) bool { return repos[i].Name < repos[j].Name })
return repos, nil
}
// goListModulesFn may be overridden by tests.
var goListModulesFn = goListModules
// goListModules invokes "go list" in a directory containing a go.mod file.
func goListModules(dir string) ([]byte, error) {
var goListModules = func(dir string) ([]byte, error) {
goTool := findGoTool()
cmd := exec.Command(goTool, "list", "-m", "-json", "all")
cmd.Stderr = os.Stderr
cmd.Dir = dir
data, err := cmd.Output()
return data, err
return cmd.Output()
}
// goModDownload invokes "go mod download" in a directory containing a
// go.mod file, requesting JSON output for the given module arguments.
// It is a variable so tests can stub it out.
var goModDownload = func(dir string, args []string) ([]byte, error) {
	cmd := exec.Command(findGoTool(), append([]string{"mod", "download", "-json"}, args...)...)
	cmd.Dir = dir
	cmd.Stderr = os.Stderr
	return cmd.Output()
}
// copyGoModToTemp copies to given go.mod file to a temporary directory.

View File

@ -17,9 +17,13 @@ package repo
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"strings"
"sync"
@ -36,28 +40,17 @@ import (
// Depending on how the RemoteCache was initialized and used earlier, some
// information may already be locally available. Frequently though, information
// will be fetched over the network, so this function may be slow.
func UpdateRepo(rc *RemoteCache, importPath string) (Repo, error) {
root, name, err := rc.Root(importPath)
func UpdateRepo(rc *RemoteCache, modPath string) (Repo, error) {
name, version, sum, err := rc.ModVersion(modPath, "latest")
if err != nil {
return Repo{}, err
}
remote, vcs, err := rc.Remote(root)
if err != nil {
return Repo{}, err
}
commit, tag, err := rc.Head(remote, vcs)
if err != nil {
return Repo{}, err
}
repo := Repo{
return Repo{
Name: name,
GoPrefix: root,
Commit: commit,
Tag: tag,
Remote: remote,
VCS: vcs,
}
return repo, nil
GoPrefix: modPath,
Version: version,
Sum: sum,
}, nil
}
// RemoteCache stores information about external repositories. The cache may
@ -67,6 +60,9 @@ func UpdateRepo(rc *RemoteCache, importPath string) (Repo, error) {
//
// Public methods of RemoteCache may be slow in cases where a network fetch
// is needed. Public methods may be called concurrently.
//
// TODO(jayconrod): this is very Go-centric. It should be moved to language/go.
// Unfortunately, doing so would break the resolve.Resolver interface.
type RemoteCache struct {
// RepoRootForImportPath is vcs.RepoRootForImportPath by default. It may
// be overridden so that tests may avoid accessing the network.
@ -76,7 +72,21 @@ type RemoteCache struct {
// repository. This is used by Head. It may be stubbed out for tests.
HeadCmd func(remote, vcs string) (string, error)
root, remote, head remoteCacheMap
// ModInfo returns the module path and version that provides the package
// with the given import path. This is used by Mod. It may be stubbed
// out for tests.
ModInfo func(importPath string) (modPath string, err error)
// ModVersionInfo returns the module path, true version, and sum for
// the module that provides the package with the given import path.
// This is used by ModVersion. It may be stubbed out for tests.
ModVersionInfo func(modPath, query string) (version, sum string, err error)
root, remote, head, mod, modVersion remoteCacheMap
tmpOnce sync.Once
tmpDir string
tmpErr error
}
// remoteCacheMap is a thread-safe, idempotent cache. It is used to store
@ -110,18 +120,39 @@ type headValue struct {
commit, tag string
}
// modValue is the cached result of a module-path lookup (see RemoteCache.Mod).
// known reports whether the entry came from a repository declared up front
// (in NewRemoteCache's knownRepos) rather than from an on-demand lookup.
type modValue struct {
	path, name string
	known bool
}

// modVersionValue is the cached result of a module-version lookup
// (see RemoteCache.ModVersion).
type modVersionValue struct {
	path, name, version, sum string
}
// NewRemoteCache creates a new RemoteCache with a set of known repositories.
// The Root and Remote methods will return information about repositories listed
// here without accessing the network. However, the Head method will still
// access the network for these repositories to retrieve information about new
// versions.
func NewRemoteCache(knownRepos []Repo) *RemoteCache {
r := &RemoteCache{
//
// A cleanup function is also returned. The caller must call this when
// RemoteCache is no longer needed. RemoteCache may write files to a temporary
// directory. This will delete them.
func NewRemoteCache(knownRepos []Repo) (r *RemoteCache, cleanup func() error) {
r = &RemoteCache{
RepoRootForImportPath: vcs.RepoRootForImportPath,
HeadCmd: defaultHeadCmd,
root: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)},
remote: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)},
head: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)},
mod: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)},
modVersion: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)},
}
r.ModInfo = func(importPath string) (string, error) {
return defaultModInfo(r, importPath)
}
r.ModVersionInfo = func(modPath, query string) (string, string, error) {
return defaultModVersionInfo(r, modPath, query)
}
for _, repo := range knownRepos {
r.root.cache[repo.GoPrefix] = &remoteCacheEntry{
@ -138,8 +169,43 @@ func NewRemoteCache(knownRepos []Repo) *RemoteCache {
},
}
}
r.mod.cache[repo.GoPrefix] = &remoteCacheEntry{
value: modValue{
path: repo.GoPrefix,
name: repo.Name,
known: true,
},
}
}
return r
// Augment knownRepos with additional prefixes for
// minimal module compatibility. For example, if repo "com_example_foo_v2"
// has prefix "example.com/foo/v2", map "example.com/foo" to the same
// entry.
// TODO(jayconrod): there should probably be some control over whether
// callers can use these mappings: packages within modules should not be
// allowed to use them. However, we'll return the same result nearly all
// the time, and simpler is better.
for _, repo := range knownRepos {
path := pathWithoutSemver(repo.GoPrefix)
if path == "" || r.root.cache[path] != nil {
continue
}
r.root.cache[path] = r.root.cache[repo.GoPrefix]
if e := r.remote.cache[repo.GoPrefix]; e != nil {
r.remote.cache[path] = e
}
r.mod.cache[path] = r.mod.cache[repo.GoPrefix]
}
return r, r.cleanup
}
// cleanup removes the temporary directory created by initTmp.
// It is a no-op if no temporary directory was ever created.
func (r *RemoteCache) cleanup() error {
	if dir := r.tmpDir; dir != "" {
		return os.RemoveAll(dir)
	}
	return nil
}
var gopkginPattern = regexp.MustCompile("^(gopkg.in/(?:[^/]+/)?[^/]+\\.v\\d+)(?:/|$)")
@ -280,7 +346,11 @@ func defaultHeadCmd(remote, vcs string) (string, error) {
cmd := exec.Command("git", "ls-remote", remote, "HEAD")
out, err := cmd.Output()
if err != nil {
return "", err
var stdErr []byte
if e, ok := err.(*exec.ExitError); ok {
stdErr = e.Stderr
}
return "", fmt.Errorf("git ls-remote for %s : %v : %s", remote, err, stdErr)
}
ix := bytes.IndexByte(out, '\t')
if ix < 0 {
@ -293,6 +363,146 @@ func defaultHeadCmd(remote, vcs string) (string, error) {
}
}
// Mod returns the module path for the module that contains the package
// named by importPath. The name of the go_repository rule for the module
// is also returned. For example, calling Mod on "github.com/foo/bar/v2/baz"
// would give the module path "github.com/foo/bar/v2" and the name
// "com_github_foo_bar_v2".
//
// If a known repository *could* provide importPath (because its "importpath"
// is a prefix of importPath), Mod will assume that it does. This may give
// inaccurate results if importPath is in an undeclared nested module. Run
// "gazelle update-repos -from_file=go.mod" first for best results.
//
// If no known repository could provide importPath, Mod will run "go list" to
// find the module. The special patterns that Root uses are ignored. Results are
// cached. Use GOPROXY for faster results.
func (r *RemoteCache) Mod(importPath string) (modPath, name string, err error) {
	// Check if any of the known repositories is a prefix: walk up the path
	// segments of importPath, probing the cache at each level.
	prefix := importPath
	for {
		v, ok, err := r.mod.get(prefix)
		if ok {
			if err != nil {
				return "", "", err
			}
			value := v.(modValue)
			if value.known {
				// This prefix was declared as a repository up front; trust it.
				return value.path, value.name, nil
			} else {
				// Cached entry from a previous on-demand lookup rather than a
				// declared repo; stop the prefix walk and resolve importPath
				// itself below.
				break
			}
		}
		prefix = path.Dir(prefix)
		if prefix == "." || prefix == "/" {
			// Ran out of path segments without a cache hit.
			break
		}
	}

	// Ask "go list" (via ModInfo). ensure caches the result — or the
	// error — under importPath so the lookup runs at most once.
	v, err := r.mod.ensure(importPath, func() (interface{}, error) {
		modPath, err := r.ModInfo(importPath)
		if err != nil {
			return nil, err
		}
		return modValue{
			path: modPath,
			name: label.ImportPathToBazelRepoName(modPath),
		}, nil
	})
	if err != nil {
		return "", "", err
	}
	value := v.(modValue)
	return value.path, value.name, nil
}
// defaultModInfo resolves the module path providing importPath by running
// "go list" in the cache's scratch module directory with modules enabled.
// It is the default implementation of RemoteCache.ModInfo.
func defaultModInfo(rc *RemoteCache, importPath string) (modPath string, err error) {
	rc.initTmp()
	if rc.tmpErr != nil {
		return "", rc.tmpErr
	}
	cmd := exec.Command(findGoTool(), "list", "-find", "-f", "{{.Module.Path}}", "--", importPath)
	cmd.Dir = rc.tmpDir
	// Force module mode regardless of the user's environment.
	cmd.Env = append(os.Environ(), "GO111MODULE=on")
	out, err := cmd.Output()
	if err == nil {
		return strings.TrimSpace(string(out)), nil
	}
	// Surface the command's stderr in the returned error when available.
	var stderr []byte
	if ee, ok := err.(*exec.ExitError); ok {
		stderr = ee.Stderr
	}
	return "", fmt.Errorf("finding module path for import %s: %v: %s", importPath, err, stderr)
}
// ModVersion looks up information about a module at a given version.
// The path must be the module path, not a package within the module.
// The version may be a canonical semantic version, a query like "latest",
// or a branch, tag, or revision name. ModVersion returns the name of
// the repository rule providing the module (if any), the true version,
// and the sum.
func (r *RemoteCache) ModVersion(modPath, query string) (name, version, sum string, err error) {
	// Resolve the version and sum via ModVersionInfo, caching the result
	// (or the error) per "modPath@query" so repeated lookups don't repeat
	// the work.
	arg := modPath + "@" + query
	v, err := r.modVersion.ensure(arg, func() (interface{}, error) {
		version, sum, err := r.ModVersionInfo(modPath, query)
		if err != nil {
			return nil, err
		}
		return modVersionValue{
			path:    modPath,
			version: version,
			sum:     sum,
		}, nil
	})
	if err != nil {
		return "", "", "", err
	}
	value := v.(modVersionValue)

	// Try to find the repository name for the module, if there's already
	// a repository rule that provides it. Otherwise, derive a canonical
	// name from the module path.
	v, ok, err := r.mod.get(modPath)
	if ok && err == nil {
		name = v.(modValue).name
	} else {
		name = label.ImportPathToBazelRepoName(modPath)
	}
	return name, value.version, value.sum, nil
}
// defaultModVersionInfo resolves modPath@query to a concrete version and
// sum by running "go mod download -json" in the cache's scratch module
// directory. It is the default implementation of RemoteCache.ModVersionInfo.
func defaultModVersionInfo(rc *RemoteCache, modPath, query string) (version, sum string, err error) {
	rc.initTmp()
	if rc.tmpErr != nil {
		return "", "", rc.tmpErr
	}
	goTool := findGoTool()
	cmd := exec.Command(goTool, "mod", "download", "-json", "--", modPath+"@"+query)
	cmd.Dir = rc.tmpDir
	// Force module mode regardless of the user's environment.
	cmd.Env = append(os.Environ(), "GO111MODULE=on")
	out, err := cmd.Output()
	if err != nil {
		var stdErr []byte
		if e, ok := err.(*exec.ExitError); ok {
			stdErr = e.Stderr
		}
		return "", "", fmt.Errorf("finding module version and sum for %s@%s: %v: %s", modPath, query, err, stdErr)
	}
	// Removed a leftover debug statement (fmt.Println(out)) that dumped the
	// raw byte slice to stdout on the error path below.
	var result struct{ Version, Sum string }
	if err := json.Unmarshal(out, &result); err != nil {
		return "", "", fmt.Errorf("finding module version and sum for %s@%s: invalid output from 'go mod download': %v", modPath, query, err)
	}
	return result.Version, result.Sum, nil
}
// get retrieves a value associated with the given key from the cache. ok will
// be true if the key exists in the cache, even if it's in the process of
// being fetched.
@ -330,3 +540,33 @@ func (m *remoteCacheMap) ensure(key string, load func() (interface{}, error)) (i
}
return e.value, e.err
}
// initTmp lazily creates a scratch directory containing a minimal go.mod so
// that module-aware go commands ("go list", "go mod download") can run
// outside of any real module. Any error is stored in rc.tmpErr; callers
// must check it after calling initTmp. The directory is removed by cleanup.
func (rc *RemoteCache) initTmp() {
	rc.tmpOnce.Do(func() {
		rc.tmpDir, rc.tmpErr = ioutil.TempDir("", "gazelle-remotecache-")
		if rc.tmpErr != nil {
			return
		}
		// Use an interpreted string literal so "\n" is a real newline. The
		// previous raw (backquoted) literal wrote a literal backslash-n into
		// go.mod, producing a malformed module directive.
		rc.tmpErr = ioutil.WriteFile(filepath.Join(rc.tmpDir, "go.mod"), []byte("module gazelle_remote_cache__\n"), 0666)
	})
}
// semverRex matches an import path carrying a major-version suffix such as
// "/v2", capturing the suffix itself.
var semverRex = regexp.MustCompile(`^.*?(/v\d+)(?:/.*)?$`)

// pathWithoutSemver removes a semantic version suffix from path.
// For example, "example.com/foo/v2/bar" yields "example.com/foo/bar".
// If there is no semantic version suffix ("/v0" and "/v1" don't count),
// the empty string is returned.
// TODO(jayconrod): copied from language/go. This whole type should be
// migrated there.
func pathWithoutSemver(path string) string {
	loc := semverRex.FindStringSubmatchIndex(path)
	if loc == nil {
		return ""
	}
	// The numeric part after "/v"; v0 and v1 are not valid module suffixes.
	switch path[loc[2]+2 : loc[3]] {
	case "0", "1":
		return ""
	}
	return path[:loc[2]] + path[loc[3]:]
}

View File

@ -13,6 +13,15 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
// Package repo provides functionality for managing Go repository rules.
//
// UNSTABLE: The exported APIs in this package may change. In the future,
// language extensions should implement an interface for repository
// rule management. The update-repos command will call interface methods,
// and most if this package's functionality will move to language/go.
// Moving this package to an internal directory would break existing
// extensions, since RemoteCache is referenced through the resolve.Resolver
// interface, which extensions are required to implement.
package repo
import (
@ -22,11 +31,12 @@ import (
"sort"
"strings"
"github.com/bazelbuild/bazel-gazelle/merger"
"github.com/bazelbuild/bazel-gazelle/rule"
)
// Repo describes an external repository rule declared in a Bazel
// WORKSPACE file.
// WORKSPACE file or macro file.
type Repo struct {
// Name is the value of the "name" attribute of the repository rule.
Name string
@ -48,6 +58,17 @@ type Repo struct {
// VCS is the version control system used to check out the repository.
// May also be "http" for HTTP archives.
VCS string
// Version is the semantic version of the module to download. Exactly one
// of Version, Commit, and Tag must be set.
Version string
// Sum is the hash of the module to be verified after download.
Sum string
// Replace is the Go import path of the module configured by the replace
// directive in go.mod.
Replace string
}
type byName []Repo
@ -56,6 +77,12 @@ func (s byName) Len() int { return len(s) }
func (s byName) Less(i, j int) bool { return s[i].Name < s[j].Name }
func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
type byRuleName []*rule.Rule
func (s byRuleName) Len() int { return len(s) }
func (s byRuleName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
func (s byRuleName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
type lockFileFormat int
const (
@ -93,6 +120,81 @@ func ImportRepoRules(filename string, repoCache *RemoteCache) ([]*rule.Rule, err
return rules, nil
}
// MergeRules merges a list of generated repo rules with the already defined repo rules,
// and then updates each rule's underlying file. If the generated rule matches an existing
// one, then it inherits the file where the existing rule was defined. If the rule is new then
// its file is set as the destFile parameter. If pruneRules is set, then this function will prune
// any existing rules that no longer have an equivalent repo defined in the Gopkg.lock/go.mod file.
// A list of the updated files is returned.
func MergeRules(genRules []*rule.Rule, existingRules map[*rule.File][]string, destFile *rule.File, kinds map[string]rule.KindInfo, pruneRules bool) []*rule.File {
sort.Stable(byRuleName(genRules))
ruleMap := make(map[string]bool)
if pruneRules {
for _, r := range genRules {
ruleMap[r.Name()] = true
}
}
repoMap := make(map[string]*rule.File)
emptyRules := make([]*rule.Rule, 0)
for file, repoNames := range existingRules {
// Avoid writing to the same file by matching destFile with its definition in existingRules
if file.Path == destFile.Path && file.MacroName() != "" && file.MacroName() == destFile.MacroName() {
file = destFile
}
for _, name := range repoNames {
if pruneRules && !ruleMap[name] {
emptyRules = append(emptyRules, rule.NewRule("go_repository", name))
}
repoMap[name] = file
}
}
rulesByFile := make(map[*rule.File][]*rule.Rule)
for _, rule := range genRules {
dest := destFile
if file, ok := repoMap[rule.Name()]; ok {
dest = file
}
rulesByFile[dest] = append(rulesByFile[dest], rule)
}
emptyRulesByFile := make(map[*rule.File][]*rule.Rule)
for _, rule := range emptyRules {
if file, ok := repoMap[rule.Name()]; ok {
emptyRulesByFile[file] = append(emptyRulesByFile[file], rule)
}
}
updatedFiles := make(map[string]*rule.File)
for f, rules := range rulesByFile {
merger.MergeFile(f, emptyRulesByFile[f], rules, merger.PreResolve, kinds)
delete(emptyRulesByFile, f)
f.Sync()
if uf, ok := updatedFiles[f.Path]; ok {
uf.SyncMacroFile(f)
} else {
updatedFiles[f.Path] = f
}
}
// Merge the remaining files that have empty rules, but no genRules
for f, rules := range emptyRulesByFile {
merger.MergeFile(f, rules, nil, merger.PreResolve, kinds)
f.Sync()
if uf, ok := updatedFiles[f.Path]; ok {
uf.SyncMacroFile(f)
} else {
updatedFiles[f.Path] = f
}
}
files := make([]*rule.File, 0, len(updatedFiles))
for _, f := range updatedFiles {
files = append(files, f)
}
return files
}
func getLockFileFormat(filename string) lockFileFormat {
switch filepath.Base(filename) {
case "Gopkg.lock":
@ -123,6 +225,15 @@ func GenerateRule(repo Repo) *rule.Rule {
if repo.VCS != "" {
r.SetAttr("vcs", repo.VCS)
}
if repo.Version != "" {
r.SetAttr("version", repo.Version)
}
if repo.Sum != "" {
r.SetAttr("sum", repo.Sum)
}
if repo.Replace != "" {
r.SetAttr("replace", repo.Replace)
}
return r
}
@ -154,13 +265,45 @@ func FindExternalRepo(repoRoot, name string) (string, error) {
}
// ListRepositories extracts metadata about repositories declared in a
// WORKSPACE file.
//
// The set of repositories returned is necessarily incomplete, since we don't
// evaluate the file, and repositories may be declared in macros in other files.
func ListRepositories(workspace *rule.File) []Repo {
var repos []Repo
for _, r := range workspace.Rules {
// file.
func ListRepositories(workspace *rule.File) (repos []Repo, repoNamesByFile map[*rule.File][]string, err error) {
repoNamesByFile = make(map[*rule.File][]string)
repos, repoNamesByFile[workspace] = getRepos(workspace.Rules)
for _, d := range workspace.Directives {
switch d.Key {
case "repository_macro":
f, defName, err := parseRepositoryMacroDirective(d.Value)
if err != nil {
return nil, nil, err
}
f = filepath.Join(filepath.Dir(workspace.Path), filepath.Clean(f))
macroFile, err := rule.LoadMacroFile(f, "", defName)
if err != nil {
return nil, nil, err
}
currRepos, names := getRepos(macroFile.Rules)
repoNamesByFile[macroFile] = names
repos = append(repos, currRepos...)
}
}
return repos, repoNamesByFile, nil
}
// parseRepositoryMacroDirective parses the value of a "repository_macro"
// directive, which must have the form "macroFile%defName". It returns the
// macro file path and the macro function name. An error is returned when the
// value is malformed or when the file path tries to escape upward with "..".
func parseRepositoryMacroDirective(directive string) (string, string, error) {
	vals := strings.Split(directive, "%")
	if len(vals) != 2 {
		// Error strings are lowercase without punctuation per Go convention
		// (staticcheck ST1005); the original message was capitalized.
		return "", "", fmt.Errorf("failure parsing repository_macro: %s, expected format is macroFile%%defName", directive)
	}
	f := vals[0]
	if strings.HasPrefix(f, "..") {
		return "", "", fmt.Errorf("failure parsing repository_macro: %s, macro file path %s should not start with %q", directive, f, "..")
	}
	return f, vals[1], nil
}
func getRepos(rules []*rule.Rule) (repos []Repo, names []string) {
for _, r := range rules {
name := r.Name()
if name == "" {
continue
@ -172,7 +315,11 @@ func ListRepositories(workspace *rule.File) []Repo {
// Currently, we don't use the result of this function to produce new
// go_repository rules, so it doesn't matter.
goPrefix := r.AttrString("importpath")
version := r.AttrString("version")
sum := r.AttrString("sum")
replace := r.AttrString("replace")
revision := r.AttrString("commit")
tag := r.AttrString("tag")
remote := r.AttrString("remote")
vcs := r.AttrString("vcs")
if goPrefix == "" {
@ -181,7 +328,11 @@ func ListRepositories(workspace *rule.File) []Repo {
repo = Repo{
Name: name,
GoPrefix: goPrefix,
Version: version,
Sum: sum,
Replace: replace,
Commit: revision,
Tag: tag,
Remote: remote,
VCS: vcs,
}
@ -193,10 +344,7 @@ func ListRepositories(workspace *rule.File) []Repo {
continue
}
repos = append(repos, repo)
names = append(names, repo.Name)
}
// TODO(jayconrod): look for directives that describe repositories that
// aren't declared in the top-level of WORKSPACE (e.g., behind a macro).
return repos
return repos, names
}

View File

@ -67,7 +67,7 @@ type RuleIndex struct {
rules []*ruleRecord
labelMap map[label.Label]*ruleRecord
importMap map[ImportSpec][]*ruleRecord
mrslv func(r *rule.Rule, f *rule.File) Resolver
mrslv func(r *rule.Rule, pkgRel string) Resolver
}
// ruleRecord contains information about a rule relevant to import indexing.
@ -97,7 +97,7 @@ type ruleRecord struct {
//
// kindToResolver is a map from rule kinds (for example, "go_library") to
// Resolvers that support those kinds.
func NewRuleIndex(mrslv func(r *rule.Rule, f *rule.File) Resolver) *RuleIndex {
func NewRuleIndex(mrslv func(r *rule.Rule, pkgRel string) Resolver) *RuleIndex {
return &RuleIndex{
labelMap: make(map[label.Label]*ruleRecord),
mrslv: mrslv,
@ -111,7 +111,7 @@ func NewRuleIndex(mrslv func(r *rule.Rule, f *rule.File) Resolver) *RuleIndex {
// AddRule may only be called before Finish.
func (ix *RuleIndex) AddRule(c *config.Config, r *rule.Rule, f *rule.File) {
var imps []ImportSpec
if rslv := ix.mrslv(r, f); rslv != nil {
if rslv := ix.mrslv(r, f.Pkg); rslv != nil {
imps = rslv.Imports(c, r, f)
}
// If imps == nil, the rule is not importable. If imps is the empty slice,
@ -151,7 +151,7 @@ func (ix *RuleIndex) collectEmbeds(r *ruleRecord) {
if r.didCollectEmbeds {
return
}
resolver := ix.mrslv(r.rule, r.file)
resolver := ix.mrslv(r.rule, r.file.Pkg)
r.didCollectEmbeds = true
embedLabels := resolver.Embeds(r.rule, r.label)
r.embeds = embedLabels
@ -161,7 +161,7 @@ func (ix *RuleIndex) collectEmbeds(r *ruleRecord) {
continue
}
ix.collectEmbeds(er)
if resolver == ix.mrslv(er.rule, er.file) {
if resolver == ix.mrslv(er.rule, er.file.Pkg) {
er.embedded = true
r.embeds = append(r.embeds, er.embeds...)
}
@ -218,7 +218,7 @@ func (ix *RuleIndex) FindRulesByImport(imp ImportSpec, lang string) []FindResult
matches := ix.importMap[imp]
results := make([]FindResult, 0, len(matches))
for _, m := range matches {
if ix.mrslv(m.rule, nil).Name() != lang {
if ix.mrslv(m.rule, "").Name() != lang {
continue
}
results = append(results, FindResult{

View File

@ -51,7 +51,7 @@ func MergeRules(src, dst *Rule, mergeable map[string]bool, filename string) {
if _, ok := src.attrs[key]; ok || !mergeable[key] || ShouldKeep(dstAttr) {
continue
}
dstValue := dstAttr.Y
dstValue := dstAttr.RHS
if mergedValue, err := mergeExprs(nil, dstValue); err != nil {
start, end := dstValue.Span()
log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
@ -64,11 +64,11 @@ func MergeRules(src, dst *Rule, mergeable map[string]bool, filename string) {
// Merge attributes from src into dst.
for key, srcAttr := range src.attrs {
srcValue := srcAttr.Y
srcValue := srcAttr.RHS
if dstAttr, ok := dst.attrs[key]; !ok {
dst.SetAttr(key, srcValue)
} else if mergeable[key] && !ShouldKeep(dstAttr) {
dstValue := dstAttr.Y
dstValue := dstAttr.RHS
if mergedValue, err := mergeExprs(srcValue, dstValue); err != nil {
start, end := dstValue.Span()
log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
@ -275,11 +275,11 @@ func SquashRules(src, dst *Rule, filename string) error {
}
for key, srcAttr := range src.attrs {
srcValue := srcAttr.Y
srcValue := srcAttr.RHS
if dstAttr, ok := dst.attrs[key]; !ok {
dst.SetAttr(key, srcValue)
} else if !ShouldKeep(dstAttr) {
dstValue := dstAttr.Y
dstValue := dstAttr.RHS
if squashedValue, err := squashExprs(srcValue, dstValue); err != nil {
start, end := dstValue.Span()
return fmt.Errorf("%s:%d.%d-%d.%d: could not squash expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)

View File

@ -45,6 +45,11 @@ type File struct {
// may modify this, but editing is not complete until Sync() is called.
File *bzl.File
// function is the underlying syntax tree of a bzl file function.
// This is used for editing the bzl file function specified by the
// update-repos -to_macro option.
function *function
// Pkg is the Bazel package this build file defines.
Pkg string
@ -67,7 +72,7 @@ type File struct {
// EmptyFile creates a File wrapped around an empty syntax tree.
func EmptyFile(path, pkg string) *File {
return &File{
File: &bzl.File{Path: path},
File: &bzl.File{Path: path, Type: bzl.TypeBuild},
Path: path,
Pkg: pkg,
}
@ -97,6 +102,30 @@ func LoadWorkspaceFile(path, pkg string) (*File, error) {
return LoadWorkspaceData(path, pkg, data)
}
// LoadMacroFile reads and parses a bzl file from disk, then scans it for load
// statements and for rules called from the Starlark function named defName.
// If no function with that name exists, a new one will be created. The
// function's syntax tree is carried in the returned File and can be modified
// through Sync and Save calls.
func LoadMacroFile(path, pkg, defName string) (*File, error) {
	src, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, err
	}
	return LoadMacroData(path, pkg, defName, src)
}
// EmptyMacroFile creates a bzl file at the given path and within the file creates
// a Starlark function with the provided name. The function can then be modified
// by Sync and Save calls.
func EmptyMacroFile(path, pkg, defName string) (*File, error) {
	// Create (or truncate) the file, then close the handle right away: the
	// original code discarded the *os.File returned by os.Create, leaking
	// the descriptor. The file contents are loaded via LoadMacroData below.
	f, err := os.Create(path)
	if err != nil {
		return nil, err
	}
	if err := f.Close(); err != nil {
		return nil, err
	}
	return LoadMacroData(path, pkg, defName, nil)
}
// LoadData parses a build file from a byte slice and scans it for rules and
// load statements. The syntax tree within the returned File will be modified
// by editing methods.
@ -118,29 +147,82 @@ func LoadWorkspaceData(path, pkg string, data []byte) (*File, error) {
return ScanAST(pkg, ast), nil
}
// LoadMacroData parses a bzl file from a byte slice and scans for the load
// statements and the rules called from the given Starlark function. If there is
// no matching function name, a new function will be created and added to the
// File the next time Sync is called. The function's syntax tree is returned
// within File and can be modified by Sync and Save calls.
func LoadMacroData(path, pkg, defName string, data []byte) (*File, error) {
	tree, err := bzl.ParseBzl(path, data)
	if err != nil {
		return nil, err
	}
	return ScanASTBody(pkg, defName, tree), nil
}
// ScanAST creates a File wrapped around the given syntax tree. This tree
// will be modified by editing methods. It is equivalent to ScanASTBody with
// an empty macro function name, i.e., no macro function is being edited.
func ScanAST(pkg string, bzlFile *bzl.File) *File {
	return ScanASTBody(pkg, "", bzlFile)
}
// function tracks the macro function being edited within a .bzl file, as
// selected by the defName argument to LoadMacroData / ScanASTBody.
type function struct {
	// stmt is the syntax tree of the macro's def statement.
	stmt *bzl.DefStmt
	// inserted reports whether stmt is already part of the file's statement
	// list (Sync appends it when false). hasPass reports whether the
	// function body consists of a single "pass" statement, used as a
	// placeholder for an otherwise empty function.
	inserted, hasPass bool
}
// ScanASTBody creates a File wrapped around the given syntax tree. It will also
// scan the AST for a function matching the given defName, and if the function
// does not exist it will create a new one and mark it to be added to the File
// the next time Sync is called.
func ScanASTBody(pkg, defName string, bzlFile *bzl.File) *File {
f := &File{
File: bzlFile,
Pkg: pkg,
Path: bzlFile.Path,
}
for i, stmt := range f.File.Stmt {
switch stmt := stmt.(type) {
case *bzl.LoadStmt:
l := loadFromExpr(i, stmt)
f.Loads = append(f.Loads, l)
case *bzl.CallExpr:
if r := ruleFromExpr(i, stmt); r != nil {
f.Rules = append(f.Rules, r)
var defStmt *bzl.DefStmt
f.Rules, f.Loads, defStmt = scanExprs(defName, bzlFile.Stmt)
if defStmt != nil {
f.Rules, _, _ = scanExprs("", defStmt.Body)
f.function = &function{
stmt: defStmt,
inserted: true,
}
if len(defStmt.Body) == 1 {
if v, ok := defStmt.Body[0].(*bzl.BranchStmt); ok && v.Token == "pass" {
f.function.hasPass = true
}
}
} else if defName != "" {
f.function = &function{
stmt: &bzl.DefStmt{Name: defName},
inserted: false,
}
}
f.Directives = ParseDirectives(bzlFile)
return f
}
// scanExprs partitions a statement list into the rules and load statements it
// contains. It also returns the def statement whose name matches defName
// (nil when there is no such function in the list).
func scanExprs(defName string, stmt []bzl.Expr) (rules []*Rule, loads []*Load, fn *bzl.DefStmt) {
	for idx, e := range stmt {
		switch e := e.(type) {
		case *bzl.LoadStmt:
			loads = append(loads, loadFromExpr(idx, e))
		case *bzl.CallExpr:
			if r := ruleFromExpr(idx, e); r != nil {
				rules = append(rules, r)
			}
		case *bzl.DefStmt:
			if e.Name == defName {
				fn = e
			}
		}
	}
	return rules, loads, fn
}
// MatchBuildFileName looks for a file in files that has a name from names.
// If there is at least one matching file, a path will be returned by joining
// dir and the first matching name. If there are no matching files, the
@ -156,56 +238,101 @@ func MatchBuildFileName(dir string, names []string, files []os.FileInfo) string
return ""
}
// SyncMacroFile syncs the file's syntax tree with another file's. This is
// useful for keeping multiple macro definitions from the same .bzl file in sync.
func (f *File) SyncMacroFile(from *File) {
	// Copy the source macro's def statement by value so this file's tree
	// does not alias the statement owned by "from".
	macro := *from.function.stmt
	_, _, existing := scanExprs(macro.Name, f.File.Stmt)
	if existing == nil {
		// No function with that name yet; append the copy to this file.
		f.File.Stmt = append(f.File.Stmt, &macro)
		return
	}
	*existing = macro
}
// MacroName returns the name of the macro function that this file is editing,
// or an empty string if a macro function is not being edited.
func (f *File) MacroName() string {
	if fn := f.function; fn != nil && fn.stmt != nil {
		return fn.stmt.Name
	}
	return ""
}
// Sync writes all changes back to the wrapped syntax tree. This should be
// called after editing operations, before reading the syntax tree again.
func (f *File) Sync() {
var inserts, deletes, stmts []*stmt
var loadInserts, loadDeletes, loadStmts []*stmt
var r, w int
for r, w = 0, 0; r < len(f.Loads); r++ {
s := f.Loads[r]
s.sync()
if s.deleted {
deletes = append(deletes, &s.stmt)
loadDeletes = append(loadDeletes, &s.stmt)
continue
}
if s.inserted {
inserts = append(inserts, &s.stmt)
loadInserts = append(loadInserts, &s.stmt)
s.inserted = false
} else {
stmts = append(stmts, &s.stmt)
loadStmts = append(loadStmts, &s.stmt)
}
f.Loads[w] = s
w++
}
f.Loads = f.Loads[:w]
var ruleInserts, ruleDeletes, ruleStmts []*stmt
for r, w = 0, 0; r < len(f.Rules); r++ {
s := f.Rules[r]
s.sync()
if s.deleted {
deletes = append(deletes, &s.stmt)
ruleDeletes = append(ruleDeletes, &s.stmt)
continue
}
if s.inserted {
inserts = append(inserts, &s.stmt)
ruleInserts = append(ruleInserts, &s.stmt)
s.inserted = false
} else {
stmts = append(stmts, &s.stmt)
ruleStmts = append(ruleStmts, &s.stmt)
}
f.Rules[w] = s
w++
}
f.Rules = f.Rules[:w]
if f.function == nil {
deletes := append(ruleDeletes, loadDeletes...)
inserts := append(ruleInserts, loadInserts...)
stmts := append(ruleStmts, loadStmts...)
updateStmt(&f.File.Stmt, inserts, deletes, stmts)
} else {
updateStmt(&f.File.Stmt, loadInserts, loadDeletes, loadStmts)
if f.function.hasPass && len(ruleInserts) > 0 {
f.function.stmt.Body = []bzl.Expr{}
f.function.hasPass = false
}
updateStmt(&f.function.stmt.Body, ruleInserts, ruleDeletes, ruleStmts)
if len(f.function.stmt.Body) == 0 {
f.function.stmt.Body = append(f.function.stmt.Body, &bzl.BranchStmt{Token: "pass"})
f.function.hasPass = true
}
if !f.function.inserted {
f.File.Stmt = append(f.File.Stmt, f.function.stmt)
f.function.inserted = true
}
}
}
func updateStmt(oldStmt *[]bzl.Expr, inserts, deletes, stmts []*stmt) {
sort.Stable(byIndex(deletes))
sort.Stable(byIndex(inserts))
sort.Stable(byIndex(stmts))
oldStmt := f.File.Stmt
f.File.Stmt = make([]bzl.Expr, 0, len(oldStmt)-len(deletes)+len(inserts))
newStmt := make([]bzl.Expr, 0, len(*oldStmt)-len(deletes)+len(inserts))
var ii, di, si int
for i, stmt := range oldStmt {
for i, stmt := range *oldStmt {
for ii < len(inserts) && inserts[ii].index == i {
inserts[ii].index = len(f.File.Stmt)
f.File.Stmt = append(f.File.Stmt, inserts[ii].expr)
inserts[ii].index = len(newStmt)
newStmt = append(newStmt, inserts[ii].expr)
ii++
}
if di < len(deletes) && deletes[di].index == i {
@ -213,16 +340,17 @@ func (f *File) Sync() {
continue
}
if si < len(stmts) && stmts[si].expr == stmt {
stmts[si].index = len(f.File.Stmt)
stmts[si].index = len(newStmt)
si++
}
f.File.Stmt = append(f.File.Stmt, stmt)
newStmt = append(newStmt, stmt)
}
for ii < len(inserts) {
inserts[ii].index = len(f.File.Stmt)
f.File.Stmt = append(f.File.Stmt, inserts[ii].expr)
inserts[ii].index = len(newStmt)
newStmt = append(newStmt, inserts[ii].expr)
ii++
}
*oldStmt = newStmt
}
// Format formats the build file in a form that can be written to disk.
@ -415,16 +543,16 @@ type Rule struct {
stmt
kind string
args []bzl.Expr
attrs map[string]*bzl.BinaryExpr
attrs map[string]*bzl.AssignExpr
private map[string]interface{}
}
// NewRule creates a new, empty rule with the given kind and name.
func NewRule(kind, name string) *Rule {
nameAttr := &bzl.BinaryExpr{
X: &bzl.Ident{Name: "name"},
Y: &bzl.StringExpr{Value: name},
Op: "=",
nameAttr := &bzl.AssignExpr{
LHS: &bzl.Ident{Name: "name"},
RHS: &bzl.StringExpr{Value: name},
Op: "=",
}
r := &Rule{
stmt: stmt{
@ -434,7 +562,7 @@ func NewRule(kind, name string) *Rule {
},
},
kind: kind,
attrs: map[string]*bzl.BinaryExpr{"name": nameAttr},
attrs: map[string]*bzl.AssignExpr{"name": nameAttr},
private: map[string]interface{}{},
}
return r
@ -451,11 +579,10 @@ func ruleFromExpr(index int, expr bzl.Expr) *Rule {
}
kind := x.Name
var args []bzl.Expr
attrs := make(map[string]*bzl.BinaryExpr)
attrs := make(map[string]*bzl.AssignExpr)
for _, arg := range call.List {
attr, ok := arg.(*bzl.BinaryExpr)
if ok && attr.Op == "=" {
key := attr.X.(*bzl.Ident) // required by parser
if attr, ok := arg.(*bzl.AssignExpr); ok {
key := attr.LHS.(*bzl.Ident) // required by parser
attrs[key.Name] = attr
} else {
args = append(args, arg)
@ -524,7 +651,7 @@ func (r *Rule) Attr(key string) bzl.Expr {
if !ok {
return nil
}
return attr.Y
return attr.RHS
}
// AttrString returns the value of the named attribute if it is a scalar string.
@ -534,7 +661,7 @@ func (r *Rule) AttrString(key string) string {
if !ok {
return ""
}
str, ok := attr.Y.(*bzl.StringExpr)
str, ok := attr.RHS.(*bzl.StringExpr)
if !ok {
return ""
}
@ -549,7 +676,7 @@ func (r *Rule) AttrStrings(key string) []string {
if !ok {
return nil
}
list, ok := attr.Y.(*bzl.ListExpr)
list, ok := attr.RHS.(*bzl.ListExpr)
if !ok {
return nil
}
@ -571,14 +698,14 @@ func (r *Rule) DelAttr(key string) {
// SetAttr adds or replaces the named attribute with an expression produced
// by ExprFromValue.
func (r *Rule) SetAttr(key string, value interface{}) {
y := ExprFromValue(value)
rhs := ExprFromValue(value)
if attr, ok := r.attrs[key]; ok {
attr.Y = y
attr.RHS = rhs
} else {
r.attrs[key] = &bzl.BinaryExpr{
X: &bzl.Ident{Name: key},
Y: y,
Op: "=",
r.attrs[key] = &bzl.AssignExpr{
LHS: &bzl.Ident{Name: key},
RHS: rhs,
Op: "=",
}
}
r.updated = true
@ -616,7 +743,13 @@ func (r *Rule) Args() []bzl.Expr {
func (r *Rule) Insert(f *File) {
// TODO(jayconrod): should rules always be inserted at the end? Should there
// be some sort order?
r.index = len(f.File.Stmt)
var stmt []bzl.Expr
if f.function == nil {
stmt = f.File.Stmt
} else {
stmt = f.function.stmt.Body
}
r.index = len(stmt)
r.inserted = true
f.Rules = append(f.Rules, r)
}
@ -644,12 +777,15 @@ func (r *Rule) sync() {
for _, k := range []string{"srcs", "deps"} {
if attr, ok := r.attrs[k]; ok {
bzl.Walk(attr.Y, sortExprLabels)
bzl.Walk(attr.RHS, sortExprLabels)
}
}
call := r.expr.(*bzl.CallExpr)
call.X.(*bzl.Ident).Name = r.kind
if len(r.attrs) > 1 {
call.ForceMultiLine = true
}
list := make([]bzl.Expr, 0, len(r.args)+len(r.attrs))
list = append(list, r.args...)
@ -657,7 +793,7 @@ func (r *Rule) sync() {
list = append(list, attr)
}
sortedAttrs := list[len(r.args):]
key := func(e bzl.Expr) string { return e.(*bzl.BinaryExpr).X.(*bzl.Ident).Name }
key := func(e bzl.Expr) string { return e.(*bzl.AssignExpr).LHS.(*bzl.Ident).Name }
sort.SliceStable(sortedAttrs, func(i, j int) bool {
ki := key(sortedAttrs[i])
kj := key(sortedAttrs[j])

View File

@ -23,7 +23,7 @@ type LoadInfo struct {
After []string
}
// KindInfo stores metadata for a kind or fule, for example, "go_library".
// KindInfo stores metadata for a kind of rule, for example, "go_library".
type KindInfo struct {
// MatchAny is true if a rule of this kind may be matched with any rule
// of the same kind, regardless of attributes, if exactly one rule is

View File

@ -24,6 +24,10 @@ import (
"github.com/bazelbuild/bazel-gazelle/rule"
)
// TODO(#472): store location information to validate each exclude. They
// may be set in one directory and used in another. Excludes work on
// declared generated files, so we can't just stat.
type walkConfig struct {
excludes []string
ignore bool
@ -37,6 +41,9 @@ func getWalkConfig(c *config.Config) *walkConfig {
}
func (wc *walkConfig) isExcluded(rel, base string) bool {
if base == ".git" {
return true
}
f := path.Join(rel, base)
for _, x := range wc.excludes {
if f == x {

View File

@ -136,6 +136,10 @@ func Walk(c *config.Config, cexts []config.Configurer, dirs []string, mode Mode,
c = configure(cexts, knownDirectives, c, rel, f)
wc := getWalkConfig(c)
if wc.isExcluded(rel, ".") {
return
}
var subdirs, regularFiles []string
for _, fi := range files {
base := fi.Name()

View File

@ -33,7 +33,6 @@ def go_yacc(src, out, visibility = None):
" $(location " + _GO_YACC_TOOL + ") " +
" -o $(location " + out + ") $(SRCS) > /dev/null"),
visibility = visibility,
local = 1,
)
def _extract_go_src(ctx):
@ -58,12 +57,46 @@ def genfile_check_test(src, gen):
native.genrule(
name = src + "_checksh",
outs = [src + "_check.sh"],
cmd = "echo 'diff $$@' > $@",
cmd = r"""cat >$@ <<'eof'
#!/bin/bash
# Script generated by @com_github_bazelbuild_buildtools//build:build_defs.bzl
# --- begin runfiles.bash initialization ---
# Copy-pasted from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
set -euo pipefail
if [[ ! -d "$${RUNFILES_DIR:-/dev/null}" && ! -f "$${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
if [[ -f "$$0.runfiles_manifest" ]]; then
export RUNFILES_MANIFEST_FILE="$$0.runfiles_manifest"
elif [[ -f "$$0.runfiles/MANIFEST" ]]; then
export RUNFILES_MANIFEST_FILE="$$0.runfiles/MANIFEST"
elif [[ -f "$$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
export RUNFILES_DIR="$$0.runfiles"
fi
fi
if [[ -f "$${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
source "$${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
elif [[ -f "$${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
source "$$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \
"$$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)"
else
echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash"
exit 1
fi
# --- end runfiles.bash initialization ---
[[ "$$1" = external/* ]] && F1="$${1#external/}" || F1="$$TEST_WORKSPACE/$$1"
[[ "$$2" = external/* ]] && F2="$${2#external/}" || F2="$$TEST_WORKSPACE/$$2"
F1="$$(rlocation "$$F1")"
F2="$$(rlocation "$$F2")"
diff -q "$$F1" "$$F2"
eof
""",
)
native.sh_test(
name = src + "_checkshtest",
size = "small",
srcs = [src + "_check.sh"],
deps = ["@bazel_tools//tools/bash/runfiles"],
data = [src, gen],
args = ["$(location " + src + ")", "$(location " + gen + ")"],
)

View File

@ -33,22 +33,26 @@ import (
type FileType int
const (
// TypeDefault represents .bzl or other Starlark files
// TypeDefault represents general Starlark files
TypeDefault FileType = 1 << iota
// TypeBuild represents BUILD files
TypeBuild
// TypeWorkspace represents WORKSPACE files
TypeWorkspace
// TypeBzl represents .bzl files
TypeBzl
)
func (t FileType) String() string {
switch t {
case TypeDefault:
return ".bzl"
return "default"
case TypeBuild:
return "BUILD"
case TypeWorkspace:
return "WORKSPACE"
case TypeBzl:
return ".bzl"
}
return "unknown"
}
@ -77,7 +81,19 @@ func ParseWorkspace(filename string, data []byte) (*File, error) {
return f, err
}
// ParseDefault parses a file, marks it as not a BUILD file (e.g. bzl file) and returns the corresponding parse tree.
// ParseBzl parses a file, marks it as a .bzl file and returns the corresponding parse tree.
//
// The filename is used only for generating error messages.
func ParseBzl(filename string, data []byte) (*File, error) {
in := newInput(filename, data)
f, err := in.parse()
if f != nil {
f.Type = TypeBzl
}
return f, err
}
// ParseDefault parses a file, marks it as a generic Starlark file and returns the corresponding parse tree.
//
// The filename is used only for generating error messages.
func ParseDefault(filename string, data []byte) (*File, error) {
@ -91,11 +107,17 @@ func ParseDefault(filename string, data []byte) (*File, error) {
func getFileType(filename string) FileType {
if filename == "" { // stdin
return TypeBuild // For compatibility
return TypeDefault
}
basename := strings.ToLower(filepath.Base(filename))
if strings.HasSuffix(basename, ".oss") {
basename = basename[:len(basename)-4]
}
ext := filepath.Ext(basename)
if ext == ".bzl" || ext == ".sky" {
switch ext {
case ".bzl":
return TypeBzl
case ".sky":
return TypeDefault
}
base := basename[:len(basename)-len(ext)]
@ -118,6 +140,8 @@ func Parse(filename string, data []byte) (*File, error) {
return ParseBuild(filename, data)
case TypeWorkspace:
return ParseWorkspace(filename, data)
case TypeBzl:
return ParseBzl(filename, data)
}
return ParseDefault(filename, data)
}
@ -444,17 +468,35 @@ func (in *input) Lex(val *yySymType) int {
in.readRune()
return c
case '<', '>', '=', '!', '+', '-', '*', '/', '%', '|': // possibly followed by =
case '<', '>', '=', '!', '+', '-', '*', '/', '%', '|', '&', '~', '^': // possibly followed by =
in.readRune()
if c == '~' {
// unary bitwise not, shouldn't be followed by anything
return c
}
if c == '*' && in.peekRune() == '*' {
// double asterisk
in.readRune()
return _STAR_STAR
}
if c == '/' && in.peekRune() == '/' {
// integer division
in.readRune()
if c == in.peekRune() {
switch c {
case '/':
// integer division
in.readRune()
c = _INT_DIV
case '<':
// left shift
in.readRune()
c = _BIT_LSH
case '>':
// right shift
in.readRune()
c = _BIT_RSH
}
}
if in.peekRune() == '=' {
@ -531,7 +573,7 @@ func (in *input) Lex(val *yySymType) int {
}
}
in.endToken(val)
s, triple, err := unquote(val.tok)
s, triple, err := Unquote(val.tok)
if err != nil {
in.Error(fmt.Sprint(err))
}
@ -568,12 +610,10 @@ func (in *input) Lex(val *yySymType) int {
case "continue":
return _CONTINUE
}
for _, c := range val.tok {
if c > '9' || c < '0' {
return _IDENT
}
if len(val.tok) > 0 && val.tok[0] >= '0' && val.tok[0] <= '9' {
return _NUMBER
}
return _NUMBER
return _IDENT
}
// isIdent reports whether c is an identifier rune.
@ -695,6 +735,9 @@ func (in *input) order(v Expr) {
case *BinaryExpr:
in.order(v.X)
in.order(v.Y)
case *AssignExpr:
in.order(v.LHS)
in.order(v.RHS)
case *ConditionalExpr:
in.order(v.Then)
in.order(v.Test)

View File

@ -64,6 +64,9 @@ package build
%token <pos> '{'
%token <pos> '}'
%token <pos> '|'
%token <pos> '&'
%token <pos> '^'
%token <pos> '~'
// By convention, yacc token names are all caps.
// However, we do not want to export them from the Go package
@ -88,6 +91,9 @@ package build
%token <pos> _LE // operator <=
%token <pos> _NE // operator !=
%token <pos> _STAR_STAR // operator **
%token <pos> _INT_DIV // operator //
%token <pos> _BIT_LSH // bitwise operator <<
%token <pos> _BIT_RSH // bitwise operator >>
%token <pos> _NOT // keyword not
%token <pos> _OR // keyword or
%token <pos> _STRING // quoted string
@ -166,9 +172,12 @@ package build
%left _OR
%left _AND
%left '<' '>' _EQ _NE _LE _GE _NOT _IN
%left '+' '-'
%left '*' '/' '%'
%left '|'
%left '^'
%left '&'
%left _BIT_LSH _BIT_RSH
%left '+' '-'
%left '*' '/' '%' _INT_DIV
%left '.' '[' '('
%right _UNARY
%left _STRING
@ -324,6 +333,7 @@ block_stmt:
Body: $7,
},
Name: $<tok>2,
ColonPos: $6,
ForceCompact: forceCompact($3, $4, $5),
ForceMultiLine: forceMultiLine($3, $4, $5),
}
@ -708,6 +718,10 @@ parameter:
{
$$ = unary($1, $<tok>1, $2)
}
| '*'
{
$$ = unary($1, $<tok>1, nil)
}
| _STAR_STAR ident
{
$$ = unary($1, $<tok>1, $2)
@ -769,9 +783,12 @@ test:
}
| _NOT test %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| '-' test %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| '+' test %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| '~' test %prec _UNARY { $$ = unary($1, $<tok>1, $2) }
| test '*' test { $$ = binary($1, $2, $<tok>2, $3) }
| test '%' test { $$ = binary($1, $2, $<tok>2, $3) }
| test '/' test { $$ = binary($1, $2, $<tok>2, $3) }
| test _INT_DIV test { $$ = binary($1, $2, $<tok>2, $3) }
| test '+' test { $$ = binary($1, $2, $<tok>2, $3) }
| test '-' test { $$ = binary($1, $2, $<tok>2, $3) }
| test '<' test { $$ = binary($1, $2, $<tok>2, $3) }
@ -785,6 +802,10 @@ test:
| test _OR test { $$ = binary($1, $2, $<tok>2, $3) }
| test _AND test { $$ = binary($1, $2, $<tok>2, $3) }
| test '|' test { $$ = binary($1, $2, $<tok>2, $3) }
| test '&' test { $$ = binary($1, $2, $<tok>2, $3) }
| test '^' test { $$ = binary($1, $2, $<tok>2, $3) }
| test _BIT_LSH test { $$ = binary($1, $2, $<tok>2, $3) }
| test _BIT_RSH test { $$ = binary($1, $2, $<tok>2, $3) }
| test _IS test
{
if b, ok := $3.(*UnaryExpr); ok && b.Op == "not" {
@ -972,12 +993,24 @@ func unary(pos Position, op string, x Expr) Expr {
func binary(x Expr, pos Position, op string, y Expr) Expr {
_, xend := x.Span()
ystart, _ := y.Span()
switch op {
case "=", "+=", "-=", "*=", "/=", "//=", "%=", "|=":
return &AssignExpr{
LHS: x,
OpPos: pos,
Op: op,
LineBreak: xend.Line < ystart.Line,
RHS: y,
}
}
return &BinaryExpr{
X: x,
OpStart: pos,
Op: op,
X: x,
OpStart: pos,
Op: op,
LineBreak: xend.Line < ystart.Line,
Y: y,
Y: y,
}
}

File diff suppressed because it is too large Load Diff

View File

@ -176,7 +176,17 @@ func (p *printer) nestedStatements(stmts []Expr) {
p.level--
}
func (p *printer) statements(stmts []Expr) {
func (p *printer) statements(rawStmts []Expr) {
// rawStmts may contain nils if a refactoring tool replaces an actual statement with nil.
// It means the statements don't exist anymore, just ignore them.
stmts := []Expr{}
for _, stmt := range rawStmts {
if stmt != nil {
stmts = append(stmts, stmt)
}
}
for i, stmt := range stmts {
switch stmt := stmt.(type) {
case *CommentBlock:
@ -226,7 +236,7 @@ func (p *printer) compactStmt(s1, s2 Expr) bool {
} else if isCommentBlock(s1) || isCommentBlock(s2) {
// Standalone comment blocks shouldn't be attached to other statements
return false
} else if p.fileType != TypeDefault && p.level == 0 {
} else if (p.fileType == TypeBuild || p.fileType == TypeWorkspace) && p.level == 0 {
// Top-level statements in a BUILD or WORKSPACE file
return false
} else if isFunctionDefinition(s1) || isFunctionDefinition(s2) {
@ -299,6 +309,10 @@ const (
precOr
precAnd
precCmp
precBitwiseOr
precBitwiseXor
precBitwiseAnd
precBitwiseShift
precAdd
precMultiply
precUnary
@ -307,13 +321,6 @@ const (
// opPrec gives the precedence for operators found in a BinaryExpr.
var opPrec = map[string]int{
"=": precAssign,
"+=": precAssign,
"-=": precAssign,
"*=": precAssign,
"/=": precAssign,
"//=": precAssign,
"%=": precAssign,
"or": precOr,
"and": precAnd,
"in": precCmp,
@ -330,7 +337,11 @@ var opPrec = map[string]int{
"/": precMultiply,
"//": precMultiply,
"%": precMultiply,
"|": precMultiply,
"|": precBitwiseOr,
"&": precBitwiseAnd,
"^": precBitwiseXor,
"<<": precBitwiseShift,
">>": precBitwiseShift,
}
// expr prints the expression v to the print buffer.
@ -401,7 +412,7 @@ func (p *printer) expr(v Expr, outerPrec int) {
// If the Token is a correct quoting of Value and has double quotes, use it,
// also use it if it has single quotes and the value itself contains a double quote symbol.
// This preserves the specific escaping choices that BUILD authors have made.
s, triple, err := unquote(v.Token)
s, triple, err := Unquote(v.Token)
if s == v.Value && triple == v.TripleQuote && err == nil {
if strings.HasPrefix(v.Token, `"`) || strings.ContainsRune(v.Value, '"') {
p.printf("%s", v.Token)
@ -463,7 +474,11 @@ func (p *printer) expr(v Expr, outerPrec int) {
} else {
p.printf("%s", v.Op)
}
p.expr(v.X, precUnary)
// Use the next precedence level (precSuffix), so that nested unary expressions are parenthesized,
// for example: `not (-(+(~foo)))` instead of `not -+~foo`
if v.X != nil {
p.expr(v.X, precSuffix)
}
case *LambdaExpr:
addParen(precColon)
@ -498,9 +513,6 @@ func (p *printer) expr(v Expr, outerPrec int) {
m := p.margin
if v.LineBreak {
p.margin = p.indent()
if v.Op == "=" {
p.margin += listIndentation
}
}
p.expr(v.X, prec)
@ -513,6 +525,23 @@ func (p *printer) expr(v Expr, outerPrec int) {
p.expr(v.Y, prec+1)
p.margin = m
case *AssignExpr:
addParen(precAssign)
m := p.margin
if v.LineBreak {
p.margin = p.indent() + listIndentation
}
p.expr(v.LHS, precAssign)
p.printf(" %s", v.Op)
if v.LineBreak {
p.breakline()
} else {
p.printf(" ")
}
p.expr(v.RHS, precAssign+1)
p.margin = m
case *ParenExpr:
p.seq("()", &v.Start, &[]Expr{v.X}, &v.End, modeParen, false, v.ForceMultiLine)
@ -536,10 +565,10 @@ func (p *printer) expr(v Expr, outerPrec int) {
arg = from.asString()
arg.Comment().Before = to.Comment().Before
} else {
arg = &BinaryExpr{
X: to,
Op: "=",
Y: from.asString(),
arg = &AssignExpr{
LHS: to,
Op: "=",
RHS: from.asString(),
}
}
args = append(args, arg)
@ -622,13 +651,18 @@ func (p *printer) expr(v Expr, outerPrec int) {
// If the else-block contains just one statement which is an IfStmt, flatten it as a part
// of if-elif chain.
// Don't do it if the "else" statement has a suffix comment.
if len(block.ElsePos.Comment().Suffix) == 0 && len(block.False) == 1 {
next, ok := block.False[0].(*IfStmt)
if ok {
block = next
continue
}
// Don't do it if the "else" statement has a suffix comment or if the next "if" statement
// has a before-comment.
if len(block.False) != 1 {
break
}
next, ok := block.False[0].(*IfStmt)
if !ok {
break
}
if len(block.ElsePos.Comment().Suffix) == 0 && len(next.Comment().Before) == 0 {
block = next
continue
}
break
}
@ -685,7 +719,7 @@ func (p *printer) useCompactMode(start *Position, list *[]Expr, end *End, mode s
// If there are line comments, use multiline
// so we can print the comments before the closing bracket.
for _, x := range *list {
if len(x.Comment().Before) > 0 {
if len(x.Comment().Before) > 0 || (len(x.Comment().Suffix) > 0 && mode != modeDef) {
return false
}
}
@ -698,17 +732,20 @@ func (p *printer) useCompactMode(start *Position, list *[]Expr, end *End, mode s
return true
}
// In the Default printing mode try to keep the original printing style.
// In the Default and .bzl printing modes try to keep the original printing style.
// Non-top-level statements and lists of arguments of a function definition
// should also keep the original style regardless of the mode.
if (p.level != 0 || p.fileType == TypeDefault || mode == modeDef) && mode != modeLoad {
if (p.level != 0 || p.fileType == TypeDefault || p.fileType == TypeBzl || mode == modeDef) && mode != modeLoad {
// If every element (including the brackets) ends on the same line where the next element starts,
// use the compact mode, otherwise use multiline mode.
// If an node's line number is 0, it means it doesn't appear in the original file,
// its position shouldn't be taken into account.
// its position shouldn't be taken into account. Unless a sequence is new,
// then use multiline mode if ForceMultiLine mode was set.
previousEnd := start
isNewSeq := start.Line == 0
for _, x := range *list {
start, end := x.Span()
isNewSeq = isNewSeq && start.Line == 0
if isDifferentLines(&start, previousEnd) {
return false
}
@ -716,10 +753,17 @@ func (p *printer) useCompactMode(start *Position, list *[]Expr, end *End, mode s
previousEnd = &end
}
}
if end != nil && isDifferentLines(previousEnd, &end.Pos) {
return false
if end != nil {
isNewSeq = isNewSeq && end.Pos.Line == 0
if isDifferentLines(previousEnd, &end.Pos) {
return false
}
}
return true
if !isNewSeq {
return true
}
// Use the forceMultiline value for new sequences.
return !forceMultiLine
}
// In Build mode, use the forceMultiline and forceCompact values
if forceMultiLine {

View File

@ -59,10 +59,10 @@ var esc = [256]byte{
// being used as shell arguments containing regular expressions.
const notEsc = " !#$%&()*+,-./:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~"
// unquote unquotes the quoted string, returning the actual
// Unquote unquotes the quoted string, returning the actual
// string value, whether the original was triple-quoted, and
// an error describing invalid input.
func unquote(quoted string) (s string, triple bool, err error) {
func Unquote(quoted string) (s string, triple bool, err error) {
// Check for raw prefix: means don't interpret the inner \.
raw := false
if strings.HasPrefix(quoted, "r") {

View File

@ -18,13 +18,12 @@ distributed under the License is distributed on an "AS IS" BASIS,
package build
import (
"github.com/bazelbuild/buildtools/tables"
"path"
"path/filepath"
"regexp"
"sort"
"strings"
"github.com/bazelbuild/buildtools/tables"
)
// For debugging: flag to disable certain rewrites.
@ -80,45 +79,29 @@ type RewriteInfo struct {
SortLoad int // number of load argument lists sorted
FormatDocstrings int // number of reindented docstrings
ReorderArguments int // number of reordered function call arguments
EditOctal int // number of edited octals
Log []string // log entries - may change
}
func (info *RewriteInfo) String() string {
s := ""
if info.EditLabel > 0 {
s += " label"
// Stats returns a map with statistics about applied rewrites
func (info *RewriteInfo) Stats() map[string]int {
return map[string]int{
"label": info.EditLabel,
"callname": info.NameCall,
"callsort": info.SortCall,
"listsort": info.SortStringList,
"unsafesort": info.UnsafeSort,
"sortload": info.SortLoad,
"formatdocstrings": info.FormatDocstrings,
"reorderarguments": info.ReorderArguments,
"editoctal": info.EditOctal,
}
if info.NameCall > 0 {
s += " callname"
}
if info.SortCall > 0 {
s += " callsort"
}
if info.SortStringList > 0 {
s += " listsort"
}
if info.UnsafeSort > 0 {
s += " unsafesort"
}
if info.SortLoad > 0 {
s += " sortload"
}
if info.FormatDocstrings > 0 {
s += " formatdocstrings"
}
if info.ReorderArguments > 0 {
s += " reorderarguments"
}
if s != "" {
s = s[1:]
}
return s
}
// Each rewrite function can be either applied for BUILD files, other files (such as .bzl),
// or all files.
const (
scopeDefault = TypeDefault
scopeDefault = TypeDefault | TypeBzl // .bzl and generic Starlark files
scopeBuild = TypeBuild | TypeWorkspace // BUILD and WORKSPACE files
scopeBoth = scopeDefault | scopeBuild
)
@ -135,9 +118,10 @@ var rewrites = []struct {
{"label", fixLabels, scopeBuild},
{"listsort", sortStringLists, scopeBoth},
{"multiplus", fixMultilinePlus, scopeBuild},
{"loadsort", sortLoadArgs, scopeBoth},
{"loadsort", sortAllLoadArgs, scopeBoth},
{"formatdocstrings", formatDocstrings, scopeBoth},
{"reorderarguments", reorderArguments, scopeBoth},
{"editoctal", editOctals, scopeBoth},
}
// DisableLoadSortForBuildFiles disables the loadsort transformation for BUILD files.
@ -299,18 +283,18 @@ func fixLabels(f *File, info *RewriteInfo) {
if leaveAlone1(v.List[i]) {
continue
}
as, ok := v.List[i].(*BinaryExpr)
if !ok || as.Op != "=" {
as, ok := v.List[i].(*AssignExpr)
if !ok {
continue
}
key, ok := as.X.(*Ident)
key, ok := as.LHS.(*Ident)
if !ok || !tables.IsLabelArg[key.Name] || tables.LabelBlacklist[callName(v)+"."+key.Name] {
continue
}
if leaveAlone1(as.Y) {
if leaveAlone1(as.RHS) {
continue
}
if list, ok := as.Y.(*ListExpr); ok {
if list, ok := as.RHS.(*ListExpr); ok {
for i := range list.List {
if leaveAlone1(list.List[i]) {
continue
@ -319,7 +303,7 @@ func fixLabels(f *File, info *RewriteInfo) {
shortenLabel(list.List[i])
}
}
if set, ok := as.Y.(*SetExpr); ok {
if set, ok := as.RHS.(*SetExpr); ok {
for i := range set.List {
if leaveAlone1(set.List[i]) {
continue
@ -328,8 +312,8 @@ func fixLabels(f *File, info *RewriteInfo) {
shortenLabel(set.List[i])
}
} else {
joinLabel(&as.Y)
shortenLabel(as.Y)
joinLabel(&as.RHS)
shortenLabel(as.RHS)
}
}
}
@ -412,8 +396,8 @@ func ruleNamePriority(rule, arg string) int {
// If x is of the form key=value, argName returns the string key.
// Otherwise argName returns "".
func argName(x Expr) string {
if as, ok := x.(*BinaryExpr); ok && as.Op == "=" {
if id, ok := as.X.(*Ident); ok {
if as, ok := x.(*AssignExpr); ok {
if id, ok := as.LHS.(*Ident); ok {
return id.Name
}
}
@ -460,31 +444,31 @@ func sortStringLists(f *File, info *RewriteInfo) {
if leaveAlone1(arg) {
continue
}
as, ok := arg.(*BinaryExpr)
if !ok || as.Op != "=" || leaveAlone1(as) || doNotSort(as) {
as, ok := arg.(*AssignExpr)
if !ok || leaveAlone1(as) || doNotSort(as) {
continue
}
key, ok := as.X.(*Ident)
key, ok := as.LHS.(*Ident)
if !ok {
continue
}
context := rule + "." + key.Name
if !tables.IsSortableListArg[key.Name] || tables.SortableBlacklist[context] || f.Type == TypeDefault {
if !tables.IsSortableListArg[key.Name] || tables.SortableBlacklist[context] || f.Type == TypeDefault || f.Type == TypeBzl {
continue
}
if disabled("unsafesort") && !tables.SortableWhitelist[context] && !allowedSort(context) {
continue
}
sortStringList(as.Y, info, context)
sortStringList(as.RHS, info, context)
}
case *BinaryExpr:
case *AssignExpr:
if disabled("unsafesort") {
return
}
// "keep sorted" comment on x = list forces sorting of list.
as := v
if as.Op == "=" && keepSorted(as) {
sortStringList(as.Y, info, "?")
if keepSorted(as) {
sortStringList(as.RHS, info, "?")
}
case *KeyValueExpr:
if disabled("unsafesort") {
@ -838,16 +822,13 @@ func fixMultilinePlus(f *File, info *RewriteInfo) {
})
}
func sortLoadArgs(f *File, info *RewriteInfo) {
// sortAllLoadArgs sorts all load arguments in the file
func sortAllLoadArgs(f *File, info *RewriteInfo) {
Walk(f, func(v Expr, stk []Expr) {
load, ok := v.(*LoadStmt)
if !ok {
return
}
args := loadArgs{From: load.From, To: load.To}
sort.Sort(args)
if args.modified {
info.SortLoad++
if load, ok := v.(*LoadStmt); ok {
if SortLoadArgs(load) {
info.SortLoad++
}
}
})
}
@ -906,6 +887,13 @@ func (args loadArgs) Less(i, j int) bool {
return args.To[i].Name < args.To[j].Name
}
// SortLoadArgs sorts a load statement arguments (lexicographically, but positional first)
func SortLoadArgs(load *LoadStmt) bool {
args := loadArgs{From: load.From, To: load.To}
sort.Sort(args)
return args.modified
}
// formatDocstrings fixes the indentation and trailing whitespace of docstrings
func formatDocstrings(f *File, info *RewriteInfo) {
Walk(f, func(v Expr, stk []Expr) {
@ -927,7 +915,7 @@ func formatDocstrings(f *File, info *RewriteInfo) {
if updatedToken != docstring.Token {
docstring.Token = updatedToken
// Update the value to keep it consistent with Token
docstring.Value, _, _ = unquote(updatedToken)
docstring.Value, _, _ = Unquote(updatedToken)
info.FormatDocstrings++
}
})
@ -974,10 +962,8 @@ func argumentType(expr Expr) int {
case "*":
return 3
}
case *BinaryExpr:
if expr.Op == "=" {
return 2
}
case *AssignExpr:
return 2
}
return 1
}
@ -999,3 +985,18 @@ func reorderArguments(f *File, info *RewriteInfo) {
}
})
}
// editOctals inserts 'o' into octal numbers to make it more obvious they are octal
// 0123 -> 0o123
func editOctals(f *File, info *RewriteInfo) {
Walk(f, func(expr Expr, stack []Expr) {
l, ok := expr.(*LiteralExpr)
if !ok {
return
}
if len(l.Token) > 1 && l.Token[0] == '0' && l.Token[1] >= '0' && l.Token[1] <= '9' {
l.Token = "0o" + l.Token[1:]
info.EditOctal++
}
})
}

View File

@ -184,10 +184,15 @@ func (r *Rule) SetKind(kind string) {
r.Call.X = expr
}
// ExplicitName returns the rule's target name if it's explicitly provided as a string value, "" otherwise.
func (r *Rule) ExplicitName() string {
return r.AttrString("name")
}
// Name returns the rule's target name.
// If the rule has no explicit target name, Name returns the implicit name if there is one, else the empty string.
func (r *Rule) Name() string {
explicitName := r.AttrString("name")
explicitName := r.ExplicitName()
if explicitName == "" && r.Kind() != "package" {
return r.ImplicitName
}
@ -198,8 +203,8 @@ func (r *Rule) Name() string {
func (r *Rule) AttrKeys() []string {
var keys []string
for _, expr := range r.Call.List {
if binExpr, ok := expr.(*BinaryExpr); ok && binExpr.Op == "=" {
if keyExpr, ok := binExpr.X.(*Ident); ok {
if as, ok := expr.(*AssignExpr); ok {
if keyExpr, ok := as.LHS.(*Ident); ok {
keys = append(keys, keyExpr.Name)
}
}
@ -207,16 +212,15 @@ func (r *Rule) AttrKeys() []string {
return keys
}
// AttrDefn returns the BinaryExpr defining the rule's attribute with the given key.
// That is, the result is a *BinaryExpr with Op == "=".
// AttrDefn returns the AssignExpr defining the rule's attribute with the given key.
// If the rule has no such attribute, AttrDefn returns nil.
func (r *Rule) AttrDefn(key string) *BinaryExpr {
func (r *Rule) AttrDefn(key string) *AssignExpr {
for _, kv := range r.Call.List {
as, ok := kv.(*BinaryExpr)
if !ok || as.Op != "=" {
as, ok := kv.(*AssignExpr)
if !ok {
continue
}
k, ok := as.X.(*Ident)
k, ok := as.LHS.(*Ident)
if !ok || k.Name != key {
continue
}
@ -233,7 +237,7 @@ func (r *Rule) Attr(key string) Expr {
if as == nil {
return nil
}
return as.Y
return as.RHS
}
// DelAttr deletes the rule's attribute with the named key.
@ -241,17 +245,17 @@ func (r *Rule) Attr(key string) Expr {
func (r *Rule) DelAttr(key string) Expr {
list := r.Call.List
for i, kv := range list {
as, ok := kv.(*BinaryExpr)
if !ok || as.Op != "=" {
as, ok := kv.(*AssignExpr)
if !ok {
continue
}
k, ok := as.X.(*Ident)
k, ok := as.LHS.(*Ident)
if !ok || k.Name != key {
continue
}
copy(list[i:], list[i+1:])
r.Call.List = list[:len(list)-1]
return as.Y
return as.RHS
}
return nil
}
@ -262,15 +266,15 @@ func (r *Rule) DelAttr(key string) Expr {
func (r *Rule) SetAttr(key string, val Expr) {
as := r.AttrDefn(key)
if as != nil {
as.Y = val
as.RHS = val
return
}
r.Call.List = append(r.Call.List,
&BinaryExpr{
X: &Ident{Name: key},
Op: "=",
Y: val,
&AssignExpr{
LHS: &Ident{Name: key},
Op: "=",
RHS: val,
},
)
}

View File

@ -79,6 +79,17 @@ func (c *Comments) Comment() *Comments {
return c
}
// stmtsEnd returns the end position of the last non-nil statement
func stmtsEnd(stmts []Expr) Position {
for i := len(stmts) - 1; i >= 0; i-- {
if stmts[i] != nil {
_, end := stmts[i].Span()
return end
}
}
return Position{}
}
// A File represents an entire BUILD file.
type File struct {
Path string // file path, relative to workspace directory
@ -97,10 +108,11 @@ func (f *File) DisplayPath() string {
func (f *File) Span() (start, end Position) {
if len(f.Stmt) == 0 {
return
p := Position{Line: 1, LineRune: 1}
return p, p
}
start, _ = f.Stmt[0].Span()
_, end = f.Stmt[len(f.Stmt)-1].Span()
start = Position{}
end = stmtsEnd(f.Stmt)
return start, end
}
@ -126,7 +138,7 @@ func (x *Ident) Span() (start, end Position) {
return x.NamePos, x.NamePos.add(x.Name)
}
// BranchStmt represents a `pass` statement.
// BranchStmt represents a `pass`, `break`, or `continue` statement.
type BranchStmt struct {
Comments
Token string // pass, break, continue
@ -341,6 +353,9 @@ type UnaryExpr struct {
}
func (x *UnaryExpr) Span() (start, end Position) {
if x.X == nil {
return x.OpStart, x.OpStart
}
_, end = x.X.Span()
return x.OpStart, end
}
@ -361,6 +376,22 @@ func (x *BinaryExpr) Span() (start, end Position) {
return start, end
}
// An AssignExpr represents a binary expression with `=`: LHS = RHS.
type AssignExpr struct {
Comments
LHS Expr
OpPos Position
Op string
LineBreak bool // insert line break between Op and RHS
RHS Expr
}
func (x *AssignExpr) Span() (start, end Position) {
start, _ = x.LHS.Span()
_, end = x.RHS.Span()
return start, end
}
// A ParenExpr represents a parenthesized expression: (X).
type ParenExpr struct {
Comments
@ -474,14 +505,20 @@ type DefStmt struct {
Comments
Function
Name string
ForceCompact bool // force compact (non-multiline) form when printing the arguments
ForceMultiLine bool // force multiline form when printing the arguments
ColonPos Position // position of the ":"
ForceCompact bool // force compact (non-multiline) form when printing the arguments
ForceMultiLine bool // force multiline form when printing the arguments
}
func (x *DefStmt) Span() (start, end Position) {
return x.Function.Span()
}
// HeaderSpan returns the span of the function header `def f(...):`
func (x *DefStmt) HeaderSpan() (start, end Position) {
return x.Function.StartPos, x.ColonPos
}
// A ReturnStmt represents a return statement: return f(x).
type ReturnStmt struct {
Comments
@ -508,7 +545,7 @@ type ForStmt struct {
}
func (x *ForStmt) Span() (start, end Position) {
_, end = x.Body[len(x.Body)-1].Span()
end = stmtsEnd(x.Body)
return x.For, end
}
@ -528,6 +565,6 @@ func (x *IfStmt) Span() (start, end Position) {
if body == nil {
body = x.True
}
_, end = body[len(body)-1].Span()
end = stmtsEnd(body)
return x.If, end
}

View File

@ -24,7 +24,16 @@ package build
//
func Walk(v Expr, f func(x Expr, stk []Expr)) {
var stack []Expr
walk1(&v, &stack, func(x Expr, stk []Expr) Expr {
walk1(&v, &stack, func(x *Expr, stk []Expr) Expr {
f(*x, stk)
return nil
})
}
// WalkPointers is the same as Walk but calls the callback function with pointers to nodes.
func WalkPointers(v Expr, f func(x *Expr, stk []Expr)) {
var stack []Expr
walk1(&v, &stack, func(x *Expr, stk []Expr) Expr {
f(x, stk)
return nil
})
@ -39,7 +48,9 @@ func Walk(v Expr, f func(x Expr, stk []Expr)) {
//
func Edit(v Expr, f func(x Expr, stk []Expr) Expr) Expr {
var stack []Expr
return walk1(&v, &stack, f)
return walk1(&v, &stack, func(x *Expr, stk []Expr) Expr {
return f(*x, stk)
})
}
// EditChildren is similar to Edit but doesn't visit the initial node, instead goes
@ -47,17 +58,19 @@ func Edit(v Expr, f func(x Expr, stk []Expr) Expr) Expr {
func EditChildren(v Expr, f func(x Expr, stk []Expr) Expr) {
stack := []Expr{v}
WalkOnce(v, func(x *Expr) {
walk1(x, &stack, f)
walk1(x, &stack, func(x *Expr, stk []Expr) Expr {
return f(*x, stk)
})
})
}
// walk1 is a helper function for Walk, WalkWithPostfix, and Edit.
func walk1(v *Expr, stack *[]Expr, f func(x Expr, stk []Expr) Expr) Expr {
func walk1(v *Expr, stack *[]Expr, f func(x *Expr, stk []Expr) Expr) Expr {
if v == nil {
return nil
}
if res := f(*v, *stack); res != nil {
if res := f(v, *stack); res != nil {
*v = res
}
*stack = append(*stack, *v)
@ -103,6 +116,9 @@ func WalkOnce(v Expr, f func(x *Expr)) {
case *BinaryExpr:
f(&v.X)
f(&v.Y)
case *AssignExpr:
f(&v.LHS)
f(&v.RHS)
case *LambdaExpr:
for i := range v.Params {
f(&v.Params[i])

View File

@ -208,6 +208,71 @@ var StripLabelLeadingSlashes = false
var ShortenAbsoluteLabelsToRelative = false
// AndroidNativeRules lists all Android rules that are being migrated from Native to Starlark.
var AndroidNativeRules = []string{
"aar_import",
"android_binary",
"android_device",
"android_instrumentation_test",
"android_library",
"android_local_test",
"android_ndk_respository",
"android_sdk_repository",
}
// AndroidLoadPath is the load path for the Starlark Android Rules.
var AndroidLoadPath = "@rules_android//android:rules.bzl"
// CcNativeRules lists all C++ rules that are being migrated from Native to Starlark.
var CcNativeRules = []string{
"cc_binary",
"cc_test",
"cc_library",
"cc_import",
"cc_proto_library",
"fdo_prefetch_hints",
"fdo_profile",
"cc_toolchain",
"cc_toolchain_suite",
"objc_library",
"objc_import",
}
// CcLoadPath is the load path for the Starlark C++ Rules.
var CcLoadPath = "@rules_cc//cc:defs.bzl"
// JavaNativeRules lists all Java rules that are being migrated from Native to Starlark.
var JavaNativeRules = []string{
"java_binary",
"java_import",
"java_library",
"java_lite_proto_library",
"java_proto_library",
"java_test",
"java_package_configuration",
"java_plugin",
"java_runtime",
"java_toolchain",
}
// JavaLoadPath is the load path for the Starlark Java Rules.
var JavaLoadPath = "@rules_java//java:defs.bzl"
// ProtoNativeRules lists all Proto rules that are being migrated from Native to Starlark.
var ProtoNativeRules = []string{
"proto_lang_toolchain",
"proto_library",
}
// ProtoNativeSymbols lists all Proto symbols that are being migrated from Native to Starlark.
var ProtoNativeSymbols = []string{
"ProtoInfo",
"proto_common",
}
// ProtoLoadPath is the load path for the Starlark Proto Rules.
var ProtoLoadPath = "@rules_proto//proto:defs.bzl"
// OverrideTables allows a user of the build package to override the special-case rules. The user-provided tables replace the built-in tables.
func OverrideTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) {
IsLabelArg = labelArg

10
vendor/modules.txt vendored
View File

@ -84,7 +84,7 @@ github.com/aws/aws-sdk-go/private/protocol/query/queryutil
github.com/aws/aws-sdk-go/service/sts/stsiface
github.com/aws/aws-sdk-go/aws/credentials/endpointcreds
github.com/aws/aws-sdk-go/private/protocol/json/jsonutil
# github.com/bazelbuild/bazel-gazelle v0.0.0-20190227183720-e443c54b396a
# github.com/bazelbuild/bazel-gazelle v0.18.2-0.20190823151146-67c9ddf12d8a
github.com/bazelbuild/bazel-gazelle/cmd/gazelle
github.com/bazelbuild/bazel-gazelle/config
github.com/bazelbuild/bazel-gazelle/flag
@ -100,7 +100,7 @@ github.com/bazelbuild/bazel-gazelle/rule
github.com/bazelbuild/bazel-gazelle/walk
github.com/bazelbuild/bazel-gazelle/internal/wspace
github.com/bazelbuild/bazel-gazelle/pathtools
# github.com/bazelbuild/buildtools v0.0.0-20190213131114-55b64c3d2ddf
# github.com/bazelbuild/buildtools v0.0.0-20190731111112-f720930ceb60
github.com/bazelbuild/buildtools/build
github.com/bazelbuild/buildtools/tables
# github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973
@ -455,9 +455,9 @@ golang.org/x/sys/unix
golang.org/x/sys/windows
golang.org/x/sys/cpu
# golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db
golang.org/x/text/encoding/unicode
golang.org/x/text/transform
golang.org/x/text/unicode/norm
golang.org/x/text/encoding/unicode
golang.org/x/text/encoding
golang.org/x/text/encoding/internal
golang.org/x/text/encoding/internal/identifier
@ -575,7 +575,6 @@ k8s.io/apimachinery/pkg/selection
k8s.io/apimachinery/pkg/conversion/queryparams
k8s.io/apimachinery/pkg/util/naming
k8s.io/apimachinery/pkg/apis/meta/v1/unstructured
k8s.io/apimachinery/pkg/apis/meta/v1/unstructured/unstructuredscheme
k8s.io/apimachinery/pkg/util/mergepatch
k8s.io/apimachinery/pkg/runtime/serializer/streaming
k8s.io/apimachinery/pkg/util/version
@ -588,6 +587,7 @@ k8s.io/apimachinery/pkg/runtime/serializer/protobuf
k8s.io/apimachinery/pkg/runtime/serializer/recognizer
k8s.io/apimachinery/pkg/runtime/serializer/versioning
k8s.io/apimachinery/third_party/forked/golang/json
k8s.io/apimachinery/pkg/apis/meta/v1/unstructured/unstructuredscheme
k8s.io/apimachinery/pkg/apis/meta/v1beta1
k8s.io/apimachinery/pkg/util/cache
k8s.io/apimachinery/pkg/util/diff
@ -600,8 +600,8 @@ k8s.io/apimachinery/pkg/util/httpstream/spdy
k8s.io/apimachinery/third_party/forked/golang/netutil
# k8s.io/cli-runtime v0.0.0 => k8s.io/cli-runtime v0.0.0-20190819144027-541433d7ce35
k8s.io/cli-runtime/pkg/genericclioptions
k8s.io/cli-runtime/pkg/resource
k8s.io/cli-runtime/pkg/printers
k8s.io/cli-runtime/pkg/resource
k8s.io/cli-runtime/pkg/kustomize
k8s.io/cli-runtime/pkg/kustomize/k8sdeps
k8s.io/cli-runtime/pkg/kustomize/k8sdeps/kunstruct