aboutsummaryrefslogtreecommitdiff
path: root/go/analysis/internal/checker/checker.go
diff options
context:
space:
mode:
Diffstat (limited to 'go/analysis/internal/checker/checker.go')
-rw-r--r--go/analysis/internal/checker/checker.go296
1 files changed, 165 insertions, 131 deletions
diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go
index e405a2ae1..5346acd76 100644
--- a/go/analysis/internal/checker/checker.go
+++ b/go/analysis/internal/checker/checker.go
@@ -15,7 +15,6 @@ import (
"flag"
"fmt"
"go/format"
- "go/parser"
"go/token"
"go/types"
"io/ioutil"
@@ -33,8 +32,8 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/analysisflags"
"golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/span"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/robustio"
)
var (
@@ -51,6 +50,9 @@ var (
// Log files for optional performance tracing.
CPUProfile, MemProfile, Trace string
+ // IncludeTests indicates whether test files should be analyzed too.
+ IncludeTests = true
+
// Fix determines whether to apply all suggested fixes.
Fix bool
)
@@ -65,6 +67,7 @@ func RegisterFlags() {
flag.StringVar(&CPUProfile, "cpuprofile", "", "write CPU profile to this file")
flag.StringVar(&MemProfile, "memprofile", "", "write memory profile to this file")
flag.StringVar(&Trace, "trace", "", "write trace log to this file")
+ flag.BoolVar(&IncludeTests, "test", IncludeTests, "indicates whether test files should be analyzed, too")
flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes")
}
@@ -143,7 +146,11 @@ func Run(args []string, analyzers []*analysis.Analyzer) (exitcode int) {
roots := analyze(initial, analyzers)
if Fix {
- applyFixes(roots)
+ if err := applyFixes(roots); err != nil {
+ // Fail if applying fixes failed.
+ log.Print(err)
+ return 1
+ }
}
return printDiagnostics(roots)
}
@@ -163,7 +170,7 @@ func load(patterns []string, allSyntax bool) ([]*packages.Package, error) {
}
conf := packages.Config{
Mode: mode,
- Tests: true,
+ Tests: IncludeTests,
}
initial, err := packages.Load(&conf, patterns...)
if err == nil {
@@ -301,7 +308,10 @@ func analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action
return roots
}
-func applyFixes(roots []*action) {
+func applyFixes(roots []*action) error {
+ // visit all of the actions and accumulate the suggested edits.
+ paths := make(map[robustio.FileID]string)
+ editsByAction := make(map[robustio.FileID]map[*action][]diff.Edit)
visited := make(map[*action]bool)
var apply func(*action) error
var visitAll func(actions []*action) error
@@ -309,7 +319,9 @@ func applyFixes(roots []*action) {
for _, act := range actions {
if !visited[act] {
visited[act] = true
- visitAll(act.deps)
+ if err := visitAll(act.deps); err != nil {
+ return err
+ }
if err := apply(act); err != nil {
return err
}
@@ -318,116 +330,167 @@ func applyFixes(roots []*action) {
return nil
}
- // TODO(matloob): Is this tree business too complicated? (After all this is Go!)
- // Just create a set (map) of edits, sort by pos and call it a day?
- type offsetedit struct {
- start, end int
- newText []byte
- } // TextEdit using byteOffsets instead of pos
- type node struct {
- edit offsetedit
- left, right *node
- }
-
- var insert func(tree **node, edit offsetedit) error
- insert = func(treeptr **node, edit offsetedit) error {
- if *treeptr == nil {
- *treeptr = &node{edit, nil, nil}
- return nil
- }
- tree := *treeptr
- if edit.end <= tree.edit.start {
- return insert(&tree.left, edit)
- } else if edit.start >= tree.edit.end {
- return insert(&tree.right, edit)
- }
-
- // Overlapping text edit.
- return fmt.Errorf("analyses applying overlapping text edits affecting pos range (%v, %v) and (%v, %v)",
- edit.start, edit.end, tree.edit.start, tree.edit.end)
-
- }
-
- editsForFile := make(map[*token.File]*node)
-
apply = func(act *action) error {
+ editsForTokenFile := make(map[*token.File][]diff.Edit)
for _, diag := range act.diagnostics {
for _, sf := range diag.SuggestedFixes {
for _, edit := range sf.TextEdits {
// Validate the edit.
+ // Any error here indicates a bug in the analyzer.
+ file := act.pkg.Fset.File(edit.Pos)
+ if file == nil {
+ return fmt.Errorf("analysis %q suggests invalid fix: missing file info for pos (%v)",
+ act.a.Name, edit.Pos)
+ }
if edit.Pos > edit.End {
- return fmt.Errorf(
- "diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)",
+ return fmt.Errorf("analysis %q suggests invalid fix: pos (%v) > end (%v)",
act.a.Name, edit.Pos, edit.End)
}
- file, endfile := act.pkg.Fset.File(edit.Pos), act.pkg.Fset.File(edit.End)
- if file == nil || endfile == nil || file != endfile {
- return (fmt.Errorf(
- "diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v",
- act.a.Name, file.Name(), endfile.Name()))
+ if eof := token.Pos(file.Base() + file.Size()); edit.End > eof {
+ return fmt.Errorf("analysis %q suggests invalid fix: end (%v) past end of file (%v)",
+ act.a.Name, edit.End, eof)
}
- start, end := file.Offset(edit.Pos), file.Offset(edit.End)
-
- // TODO(matloob): Validate that edits do not affect other packages.
- root := editsForFile[file]
- if err := insert(&root, offsetedit{start, end, edit.NewText}); err != nil {
- return err
- }
- editsForFile[file] = root // In case the root changed
+ edit := diff.Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)}
+ editsForTokenFile[file] = append(editsForTokenFile[file], edit)
}
}
}
+
+ for f, edits := range editsForTokenFile {
+ id, _, err := robustio.GetFileID(f.Name())
+ if err != nil {
+ return err
+ }
+ if _, hasId := paths[id]; !hasId {
+ paths[id] = f.Name()
+ editsByAction[id] = make(map[*action][]diff.Edit)
+ }
+ editsByAction[id][act] = edits
+ }
return nil
}
- visitAll(roots)
+ if err := visitAll(roots); err != nil {
+ return err
+ }
- fset := token.NewFileSet() // Shared by parse calls below
- // Now we've got a set of valid edits for each file. Get the new file contents.
- for f, tree := range editsForFile {
- contents, err := ioutil.ReadFile(f.Name())
- if err != nil {
- log.Fatal(err)
+ // Validate and group the edits to each actual file.
+ editsByPath := make(map[string][]diff.Edit)
+ for id, actToEdits := range editsByAction {
+ path := paths[id]
+ actions := make([]*action, 0, len(actToEdits))
+ for act := range actToEdits {
+ actions = append(actions, act)
}
- cur := 0 // current position in the file
-
- var out bytes.Buffer
-
- var recurse func(*node)
- recurse = func(node *node) {
- if node.left != nil {
- recurse(node.left)
+ // Does any action create conflicting edits?
+ for _, act := range actions {
+ edits := actToEdits[act]
+ if _, invalid := validateEdits(edits); invalid > 0 {
+ name, x, y := act.a.Name, edits[invalid-1], edits[invalid]
+ return diff3Conflict(path, name, name, []diff.Edit{x}, []diff.Edit{y})
}
+ }
- edit := node.edit
- if edit.start > cur {
- out.Write(contents[cur:edit.start])
- out.Write(edit.newText)
+ // Does any pair of different actions create edits that conflict?
+ for j := range actions {
+ for k := range actions[:j] {
+ x, y := actions[j], actions[k]
+ if x.a.Name > y.a.Name {
+ x, y = y, x
+ }
+ xedits, yedits := actToEdits[x], actToEdits[y]
+ combined := append(xedits, yedits...)
+ if _, invalid := validateEdits(combined); invalid > 0 {
+ // TODO: consider applying each action's consistent list of edits entirely,
+ // and then using a three-way merge (such as GNU diff3) on the resulting
+ // files to report more precisely the parts that actually conflict.
+ return diff3Conflict(path, x.a.Name, y.a.Name, xedits, yedits)
+ }
}
- cur = edit.end
+ }
- if node.right != nil {
- recurse(node.right)
- }
+ var edits []diff.Edit
+ for act := range actToEdits {
+ edits = append(edits, actToEdits[act]...)
}
- recurse(tree)
- // Write out the rest of the file.
- if cur < len(contents) {
- out.Write(contents[cur:])
+ editsByPath[path], _ = validateEdits(edits) // remove duplicates. already validated.
+ }
+
+ // Now we've got a set of valid edits for each file. Apply them.
+ for path, edits := range editsByPath {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
+ }
+
+ out, err := diff.ApplyBytes(contents, edits)
+ if err != nil {
+ return err
}
// Try to format the file.
- ff, err := parser.ParseFile(fset, f.Name(), out.Bytes(), parser.ParseComments)
- if err == nil {
- var buf bytes.Buffer
- if err = format.Node(&buf, fset, ff); err == nil {
- out = buf
+ if formatted, err := format.Source(out); err == nil {
+ out = formatted
+ }
+
+ if err := ioutil.WriteFile(path, out, 0644); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// validateEdits returns a list of edits that is sorted and
+// contains no duplicate edits. Returns the index of some
+// overlapping adjacent edits if there is one and <0 if the
+// edits are valid.
+func validateEdits(edits []diff.Edit) ([]diff.Edit, int) {
+ if len(edits) == 0 {
+ return nil, -1
+ }
+ equivalent := func(x, y diff.Edit) bool {
+ return x.Start == y.Start && x.End == y.End && x.New == y.New
+ }
+ diff.SortEdits(edits)
+ unique := []diff.Edit{edits[0]}
+ invalid := -1
+ for i := 1; i < len(edits); i++ {
+ prev, cur := edits[i-1], edits[i]
+ // We skip over equivalent edits without considering them
+ // an error. This handles identical edits coming from the
+ // multiple ways of loading a package into a
+ // *go/packages.Package for testing, e.g. packages "p" and "p [p.test]".
+ if !equivalent(prev, cur) {
+ unique = append(unique, cur)
+ if prev.End > cur.Start {
+ invalid = i
}
}
+ }
+ return unique, invalid
+}
- ioutil.WriteFile(f.Name(), out.Bytes(), 0644)
+// diff3Conflict returns an error describing two conflicting sets of
+// edits on a file at path.
+func diff3Conflict(path string, xlabel, ylabel string, xedits, yedits []diff.Edit) error {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
}
+ oldlabel, old := "base", string(contents)
+
+ xdiff, err := diff.ToUnified(oldlabel, xlabel, old, xedits)
+ if err != nil {
+ return err
+ }
+ ydiff, err := diff.ToUnified(oldlabel, ylabel, old, yedits)
+ if err != nil {
+ return err
+ }
+
+ return fmt.Errorf("conflicting edits from %s and %s on %s\nfirst edits:\n%s\nsecond edits:\n%s",
+ xlabel, ylabel, path, xdiff, ydiff)
}
// printDiagnostics prints the diagnostics for the root packages in either
@@ -574,7 +637,6 @@ type action struct {
deps []*action
objectFacts map[objectFactKey]analysis.Fact
packageFacts map[packageFactKey]analysis.Fact
- inputs map[*analysis.Analyzer]interface{}
result interface{}
diagnostics []analysis.Diagnostic
err error
@@ -672,14 +734,16 @@ func (act *action) execOnce() {
// Run the analysis.
pass := &analysis.Pass{
- Analyzer: act.a,
- Fset: act.pkg.Fset,
- Files: act.pkg.Syntax,
- OtherFiles: act.pkg.OtherFiles,
- IgnoredFiles: act.pkg.IgnoredFiles,
- Pkg: act.pkg.Types,
- TypesInfo: act.pkg.TypesInfo,
- TypesSizes: act.pkg.TypesSizes,
+ Analyzer: act.a,
+ Fset: act.pkg.Fset,
+ Files: act.pkg.Syntax,
+ OtherFiles: act.pkg.OtherFiles,
+ IgnoredFiles: act.pkg.IgnoredFiles,
+ Pkg: act.pkg.Types,
+ TypesInfo: act.pkg.TypesInfo,
+ TypesSizes: act.pkg.TypesSizes,
+ TypeErrors: act.pkg.TypeErrors,
+
ResultOf: inputs,
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
ImportObjectFact: act.importObjectFact,
@@ -691,36 +755,6 @@ func (act *action) execOnce() {
}
act.pass = pass
- var errors []types.Error
- // Get any type errors that are attributed to the pkg.
- // This is necessary to test analyzers that provide
- // suggested fixes for compiler/type errors.
- for _, err := range act.pkg.Errors {
- if err.Kind != packages.TypeError {
- continue
- }
- // err.Pos is a string of form: "file:line:col" or "file:line" or "" or "-"
- spn := span.Parse(err.Pos)
- // Extract the token positions from the error string.
- line, col, offset := spn.Start().Line(), spn.Start().Column(), -1
- act.pkg.Fset.Iterate(func(f *token.File) bool {
- if f.Name() != spn.URI().Filename() {
- return true
- }
- offset = int(f.LineStart(line)) + col - 1
- return false
- })
- if offset == -1 {
- continue
- }
- errors = append(errors, types.Error{
- Fset: act.pkg.Fset,
- Msg: err.Msg,
- Pos: token.Pos(offset),
- })
- }
- analysisinternal.SetTypeErrors(pass, errors)
-
var err error
if act.pkg.IllTyped && !pass.Analyzer.RunDespiteErrors {
err = fmt.Errorf("analysis skipped due to errors in package")
@@ -762,7 +796,7 @@ func inheritFacts(act, dep *action) {
if serialize {
encodedFact, err := codeFact(fact)
if err != nil {
- log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
+ log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err)
}
fact = encodedFact
}
@@ -826,7 +860,7 @@ func codeFact(fact analysis.Fact) (analysis.Fact, error) {
// exportedFrom reports whether obj may be visible to a package that imports pkg.
// This includes not just the exported members of pkg, but also unexported
-// constants, types, fields, and methods, perhaps belonging to oether packages,
+// constants, types, fields, and methods, perhaps belonging to other packages,
// that find their way into the API.
// This is an overapproximation of the more accurate approach used by
// gc export data, which walks the type graph, but it's much simpler.
@@ -890,7 +924,7 @@ func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
func (act *action) allObjectFacts() []analysis.ObjectFact {
facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
for k := range act.objectFacts {
- facts = append(facts, analysis.ObjectFact{k.obj, act.objectFacts[k]})
+ facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
}
return facts
}
@@ -932,11 +966,11 @@ func factType(fact analysis.Fact) reflect.Type {
return t
}
-// allObjectFacts implements Pass.AllObjectFacts.
+// allPackageFacts implements Pass.AllPackageFacts.
func (act *action) allPackageFacts() []analysis.PackageFact {
facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
for k := range act.packageFacts {
- facts = append(facts, analysis.PackageFact{k.pkg, act.packageFacts[k]})
+ facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]})
}
return facts
}