path: root/vendor/golang.org/x/tools/internal/imports
author     Terin Stock <terinjokes@gmail.com> 2025-03-09 17:47:56 +0100
committer  Terin Stock <terinjokes@gmail.com> 2025-12-01 22:08:04 +0100
commit     b1af8fd87760b34e3ff2fd3bda38f211815a0473 (patch)
tree       9317fad1a7ec298d7a8d2678e4e422953bbc6f33 /vendor/golang.org/x/tools/internal/imports
parent     [chore] update URLs to forked source (diff)
download   gotosocial-b1af8fd87760b34e3ff2fd3bda38f211815a0473.tar.xz
[chore] remove vendor
Diffstat (limited to 'vendor/golang.org/x/tools/internal/imports')
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/fix.go               1896
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/imports.go            359
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod.go                 841
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod_cache.go           331
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/sortimports.go         298
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/source.go               63
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/source_env.go          129
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/source_modindex.go     100
8 files changed, 0 insertions, 4017 deletions
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
deleted file mode 100644
index 1b4dc0cb5..000000000
--- a/vendor/golang.org/x/tools/internal/imports/fix.go
+++ /dev/null
@@ -1,1896 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package imports
-
-import (
- "bytes"
- "context"
- "encoding/json"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "go/types"
- "io/fs"
- "io/ioutil"
- "maps"
- "os"
- "path"
- "path/filepath"
- "reflect"
- "sort"
- "strconv"
- "strings"
- "sync"
- "unicode"
- "unicode/utf8"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/gocommand"
- "golang.org/x/tools/internal/gopathwalk"
- "golang.org/x/tools/internal/stdlib"
-)
-
-// importToGroup is a list of functions which map from an import path to
-// a group number.
-var importToGroup = []func(localPrefix, importPath string) (num int, ok bool){
- func(localPrefix, importPath string) (num int, ok bool) {
- if localPrefix == "" {
- return
- }
- for p := range strings.SplitSeq(localPrefix, ",") {
- if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath {
- return 3, true
- }
- }
- return
- },
- func(_, importPath string) (num int, ok bool) {
- if strings.HasPrefix(importPath, "appengine") {
- return 2, true
- }
- return
- },
- func(_, importPath string) (num int, ok bool) {
- firstComponent := strings.Split(importPath, "/")[0]
- if strings.Contains(firstComponent, ".") {
- return 1, true
- }
- return
- },
-}
-
-func importGroup(localPrefix, importPath string) int {
- for _, fn := range importToGroup {
- if n, ok := fn(localPrefix, importPath); ok {
- return n
- }
- }
- return 0
-}
-
-type ImportFixType int
-
-const (
- AddImport ImportFixType = iota
- DeleteImport
- SetImportName
-)
-
-type ImportFix struct {
- // StmtInfo represents the import statement this fix will add, remove, or change.
- StmtInfo ImportInfo
- // IdentName is the identifier that this fix will add or remove.
- IdentName string
- // FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
- FixType ImportFixType
- Relevance float64 // see pkg
-}
-
-// parseOtherFiles parses all the Go files in srcDir except filename, including
-// test files if filename looks like a test.
-//
-// It returns an error only if ctx is cancelled. Files with parse errors are
-// ignored.
-func parseOtherFiles(ctx context.Context, fset *token.FileSet, srcDir, filename string) ([]*ast.File, error) {
- // This could use go/packages but it doesn't buy much, and it fails
- // with https://golang.org/issue/26296 in LoadFiles mode in some cases.
- considerTests := strings.HasSuffix(filename, "_test.go")
-
- fileBase := filepath.Base(filename)
- packageFileInfos, err := os.ReadDir(srcDir)
- if err != nil {
- return nil, ctx.Err()
- }
-
- var files []*ast.File
- for _, fi := range packageFileInfos {
- if ctx.Err() != nil {
- return nil, ctx.Err()
- }
- if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
- continue
- }
- if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
- continue
- }
-
- f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, parser.SkipObjectResolution)
- if err != nil {
- continue
- }
-
- files = append(files, f)
- }
-
- return files, ctx.Err()
-}
-
-// addGlobals puts the names of package vars into the provided map.
-func addGlobals(f *ast.File, globals map[string]bool) {
- for _, decl := range f.Decls {
- genDecl, ok := decl.(*ast.GenDecl)
- if !ok {
- continue
- }
-
- for _, spec := range genDecl.Specs {
- valueSpec, ok := spec.(*ast.ValueSpec)
- if !ok {
- continue
- }
- globals[valueSpec.Names[0].Name] = true
- }
- }
-}
-
-// collectReferences builds a map of selector expressions, from
-// left hand side (X) to a set of right hand sides (Sel).
-func collectReferences(f *ast.File) References {
- refs := References{}
-
- var visitor visitFn
- visitor = func(node ast.Node) ast.Visitor {
- if node == nil {
- return visitor
- }
- switch v := node.(type) {
- case *ast.SelectorExpr:
- xident, ok := v.X.(*ast.Ident)
- if !ok {
- break
- }
- if xident.Obj != nil {
- // If the parser can resolve it, it's not a package ref.
- break
- }
- if !ast.IsExported(v.Sel.Name) {
- // Whatever this is, it's not exported from a package.
- break
- }
- pkgName := xident.Name
- r := refs[pkgName]
- if r == nil {
- r = make(map[string]bool)
- refs[pkgName] = r
- }
- r[v.Sel.Name] = true
- }
- return visitor
- }
- ast.Walk(visitor, f)
- return refs
-}
-
-// collectImports returns all the imports in f.
-// Unnamed imports (., _) and "C" are ignored.
-func collectImports(f *ast.File) []*ImportInfo {
- var imports []*ImportInfo
- for _, imp := range f.Imports {
- var name string
- if imp.Name != nil {
- name = imp.Name.Name
- }
- if imp.Path.Value == `"C"` || name == "_" || name == "." {
- continue
- }
- path := strings.Trim(imp.Path.Value, `"`)
- imports = append(imports, &ImportInfo{
- Name: name,
- ImportPath: path,
- })
- }
- return imports
-}
-
-// findMissingImport searches pass's candidates for an import that provides
-// pkg, containing all of syms.
-func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
- for _, candidate := range p.candidates {
- pkgInfo, ok := p.knownPackages[candidate.ImportPath]
- if !ok {
- continue
- }
- if p.importIdentifier(candidate) != pkg {
- continue
- }
-
- allFound := true
- for right := range syms {
- if !pkgInfo.Exports[right] {
- allFound = false
- break
- }
- }
-
- if allFound {
- return candidate
- }
- }
- return nil
-}
-
-// A pass contains all the inputs and state necessary to fix a file's imports.
-// It can be modified in some ways during use; see comments below.
-type pass struct {
- // Inputs. These must be set before a call to load, and not modified after.
- fset *token.FileSet // fset used to parse f and its siblings.
- f *ast.File // the file being fixed.
- srcDir string // the directory containing f.
- logf func(string, ...any)
- source Source // the environment to use for go commands, etc.
- loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
- otherFiles []*ast.File // sibling files.
- goroot string
-
- // Intermediate state, generated by load.
- existingImports map[string][]*ImportInfo
- allRefs References
- missingRefs References
-
- // Inputs to fix. These can be augmented between successive fix calls.
- lastTry bool // indicates that this is the last call and fix should clean up as best it can.
- candidates []*ImportInfo // candidate imports in priority order.
- knownPackages map[string]*PackageInfo // information about all known packages.
-}
-
-// loadPackageNames saves the package names for everything referenced by imports.
-func (p *pass) loadPackageNames(ctx context.Context, imports []*ImportInfo) error {
- if p.logf != nil {
- p.logf("loading package names for %v packages", len(imports))
- defer func() {
- p.logf("done loading package names for %v packages", len(imports))
- }()
- }
- var unknown []string
- for _, imp := range imports {
- if _, ok := p.knownPackages[imp.ImportPath]; ok {
- continue
- }
- unknown = append(unknown, imp.ImportPath)
- }
-
- names, err := p.source.LoadPackageNames(ctx, p.srcDir, unknown)
- if err != nil {
- return err
- }
-
- // TODO(rfindley): revisit this. Why do we need to store known packages with
- // no exports? The inconsistent data is confusing.
- for path, name := range names {
- p.knownPackages[path] = &PackageInfo{
- Name: name,
- Exports: map[string]bool{},
- }
- }
- return nil
-}
-
-// WithoutVersion removes a trailing major version, if there is one.
-func WithoutVersion(nm string) string {
- if v := path.Base(nm); len(v) > 0 && v[0] == 'v' {
- if _, err := strconv.Atoi(v[1:]); err == nil {
- // this is, for instance, called with rand/v2 and returns rand
- if len(v) < len(nm) {
- xnm := nm[:len(nm)-len(v)-1]
- return path.Base(xnm)
- }
- }
- }
- return nm
-}
-
-// importIdentifier returns the identifier that imp will introduce. It will
-// guess if the package name has not been loaded, e.g. because the source
-// is not available.
-func (p *pass) importIdentifier(imp *ImportInfo) string {
- if imp.Name != "" {
- return imp.Name
- }
- known := p.knownPackages[imp.ImportPath]
- if known != nil && known.Name != "" {
- return WithoutVersion(known.Name)
- }
- return ImportPathToAssumedName(imp.ImportPath)
-}
-
-// load reads in everything necessary to run a pass, and reports whether the
-// file already has all the imports it needs. It fills in p.missingRefs with the
-// file's missing symbols, if any, or removes unused imports if not.
-func (p *pass) load(ctx context.Context) ([]*ImportFix, bool) {
- p.knownPackages = map[string]*PackageInfo{}
- p.missingRefs = References{}
- p.existingImports = map[string][]*ImportInfo{}
-
- // Load basic information about the file in question.
- p.allRefs = collectReferences(p.f)
-
- // Load stuff from other files in the same package:
- // global variables so we know they don't need resolving, and imports
- // that we might want to mimic.
- globals := map[string]bool{}
- for _, otherFile := range p.otherFiles {
- // Don't load globals from files that are in the same directory
- // but a different package. Using them to suggest imports is OK.
- if p.f.Name.Name == otherFile.Name.Name {
- addGlobals(otherFile, globals)
- }
- p.candidates = append(p.candidates, collectImports(otherFile)...)
- }
-
- // Resolve all the import paths we've seen to package names, and store
- // f's imports by the identifier they introduce.
- imports := collectImports(p.f)
- if p.loadRealPackageNames {
- err := p.loadPackageNames(ctx, append(imports, p.candidates...))
- if err != nil {
- if p.logf != nil {
- p.logf("loading package names: %v", err)
- }
- return nil, false
- }
- }
- for _, imp := range imports {
- p.existingImports[p.importIdentifier(imp)] = append(p.existingImports[p.importIdentifier(imp)], imp)
- }
-
- // Find missing references.
- for left, rights := range p.allRefs {
- if globals[left] {
- continue
- }
- _, ok := p.existingImports[left]
- if !ok {
- p.missingRefs[left] = rights
- continue
- }
- }
- if len(p.missingRefs) != 0 {
- return nil, false
- }
-
- return p.fix()
-}
-
-// fix attempts to satisfy missing imports using p.candidates. If it finds
-// everything, or if p.lastTry is true, it updates fixes to add the imports it found,
-// delete anything unused, and update import names, and returns true.
-func (p *pass) fix() ([]*ImportFix, bool) {
- // Find missing imports.
- var selected []*ImportInfo
- for left, rights := range p.missingRefs {
- if imp := p.findMissingImport(left, rights); imp != nil {
- selected = append(selected, imp)
- }
- }
-
- if !p.lastTry && len(selected) != len(p.missingRefs) {
- return nil, false
- }
-
- // Found everything, or giving up. Add the new imports and remove any unused.
- var fixes []*ImportFix
- for _, identifierImports := range p.existingImports {
- for _, imp := range identifierImports {
- // We deliberately ignore globals here, because we can't be sure
- // they're in the same package. People do things like put multiple
- // main packages in the same directory, and we don't want to
- // remove imports if they happen to have the same name as a var in
- // a different package.
- if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
- fixes = append(fixes, &ImportFix{
- StmtInfo: *imp,
- IdentName: p.importIdentifier(imp),
- FixType: DeleteImport,
- })
- continue
- }
-
- // An existing import may need to update its import name to be correct.
- if name := p.importSpecName(imp); name != imp.Name {
- fixes = append(fixes, &ImportFix{
- StmtInfo: ImportInfo{
- Name: name,
- ImportPath: imp.ImportPath,
- },
- IdentName: p.importIdentifier(imp),
- FixType: SetImportName,
- })
- }
- }
- }
- // Collecting fixes involved map iteration, so sort for stability. See
- // golang/go#59976.
- sortFixes(fixes)
-
- // collect selected fixes in a separate slice, so that it can be sorted
- // separately. Note that these fixes must occur after fixes to existing
- // imports. TODO(rfindley): figure out why.
- var selectedFixes []*ImportFix
- for _, imp := range selected {
- selectedFixes = append(selectedFixes, &ImportFix{
- StmtInfo: ImportInfo{
- Name: p.importSpecName(imp),
- ImportPath: imp.ImportPath,
- },
- IdentName: p.importIdentifier(imp),
- FixType: AddImport,
- })
- }
- sortFixes(selectedFixes)
-
- return append(fixes, selectedFixes...), true
-}
-
-func sortFixes(fixes []*ImportFix) {
- sort.Slice(fixes, func(i, j int) bool {
- fi, fj := fixes[i], fixes[j]
- if fi.StmtInfo.ImportPath != fj.StmtInfo.ImportPath {
- return fi.StmtInfo.ImportPath < fj.StmtInfo.ImportPath
- }
- if fi.StmtInfo.Name != fj.StmtInfo.Name {
- return fi.StmtInfo.Name < fj.StmtInfo.Name
- }
- if fi.IdentName != fj.IdentName {
- return fi.IdentName < fj.IdentName
- }
- return fi.FixType < fj.FixType
- })
-}
-
-// importSpecName gets the import name of imp in the import spec.
-//
-// When the import identifier matches the assumed import name, the import name does
-// not appear in the import spec.
-func (p *pass) importSpecName(imp *ImportInfo) string {
- // If we did not load the real package names, or the name is already set,
- // we just return the existing name.
- if !p.loadRealPackageNames || imp.Name != "" {
- return imp.Name
- }
-
- ident := p.importIdentifier(imp)
- if ident == ImportPathToAssumedName(imp.ImportPath) {
- return "" // ident not needed since the assumed and real names are the same.
- }
- return ident
-}
-
-// apply will perform the fixes on f in order.
-func apply(fset *token.FileSet, f *ast.File, fixes []*ImportFix) {
- for _, fix := range fixes {
- switch fix.FixType {
- case DeleteImport:
- astutil.DeleteNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
- case AddImport:
- astutil.AddNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
- case SetImportName:
- // Find the matching import path and change the name.
- for _, spec := range f.Imports {
- path := strings.Trim(spec.Path.Value, `"`)
- if path == fix.StmtInfo.ImportPath {
- spec.Name = &ast.Ident{
- Name: fix.StmtInfo.Name,
- NamePos: spec.Pos(),
- }
- }
- }
- }
- }
-}
-
-// assumeSiblingImportsValid assumes that siblings' use of packages is valid,
-// adding the exports they use.
-func (p *pass) assumeSiblingImportsValid() {
- for _, f := range p.otherFiles {
- refs := collectReferences(f)
- imports := collectImports(f)
- importsByName := map[string]*ImportInfo{}
- for _, imp := range imports {
- importsByName[p.importIdentifier(imp)] = imp
- }
- for left, rights := range refs {
- if imp, ok := importsByName[left]; ok {
- if m, ok := stdlib.PackageSymbols[imp.ImportPath]; ok {
- // We have the stdlib in memory; no need to guess.
- rights = symbolNameSet(m)
- }
- // TODO(rfindley): we should set package name here, for consistency.
- p.addCandidate(imp, &PackageInfo{
- // no name; we already know it.
- Exports: rights,
- })
- }
- }
- }
-}
-
-// addCandidate adds a candidate import to p, and merges in the information
-// in pkg.
-func (p *pass) addCandidate(imp *ImportInfo, pkg *PackageInfo) {
- p.candidates = append(p.candidates, imp)
- if existing, ok := p.knownPackages[imp.ImportPath]; ok {
- if existing.Name == "" {
- existing.Name = pkg.Name
- }
- for export := range pkg.Exports {
- existing.Exports[export] = true
- }
- } else {
- p.knownPackages[imp.ImportPath] = pkg
- }
-}
-
-// fixImports adds and removes imports from f so that all its references are
-// satisfied and there are no unused imports.
-//
-// This is declared as a variable rather than a function so goimports can
-// easily be extended by adding a file with an init function.
-//
-// DO NOT REMOVE: used internally at Google.
-var fixImports = fixImportsDefault
-
-func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
- fixes, err := getFixes(context.Background(), fset, f, filename, env)
- if err != nil {
- return err
- }
- apply(fset, f, fixes)
- return nil
-}
-
-// getFixes gets the import fixes that need to be made to f in order to fix the imports.
-// It does not modify the ast.
-func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
- source, err := NewProcessEnvSource(env, filename, f.Name.Name)
- if err != nil {
- return nil, err
- }
- goEnv, err := env.goEnv()
- if err != nil {
- return nil, err
- }
- return getFixesWithSource(ctx, fset, f, filename, goEnv["GOROOT"], env.logf, source)
-}
-
-func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, goroot string, logf func(string, ...any), source Source) ([]*ImportFix, error) {
- // This logic is defensively duplicated from getFixes.
- abs, err := filepath.Abs(filename)
- if err != nil {
- return nil, err
- }
- srcDir := filepath.Dir(abs)
-
- if logf != nil {
- logf("fixImports(filename=%q), srcDir=%q ...", filename, srcDir)
- }
-
- // First pass: looking only at f, and using the naive algorithm to
- // derive package names from import paths, see if the file is already
- // complete. We can't add any imports yet, because we don't know
- // if missing references are actually package vars.
- p := &pass{
- fset: fset,
- f: f,
- srcDir: srcDir,
- logf: logf,
- goroot: goroot,
- source: source,
- }
- if fixes, done := p.load(ctx); done {
- return fixes, nil
- }
-
- otherFiles, err := parseOtherFiles(ctx, fset, srcDir, filename)
- if err != nil {
- return nil, err
- }
-
- // Second pass: add information from other files in the same package,
- // like their package vars and imports.
- p.otherFiles = otherFiles
- if fixes, done := p.load(ctx); done {
- return fixes, nil
- }
-
- // Now we can try adding imports from the stdlib.
- p.assumeSiblingImportsValid()
- addStdlibCandidates(p, p.missingRefs)
- if fixes, done := p.fix(); done {
- return fixes, nil
- }
-
- // Third pass: get real package names where we had previously used
- // the naive algorithm.
- p = &pass{
- fset: fset,
- f: f,
- srcDir: srcDir,
- logf: logf,
- goroot: goroot,
- source: p.source, // safe to reuse, as it's just a wrapper around env
- }
- p.loadRealPackageNames = true
- p.otherFiles = otherFiles
- if fixes, done := p.load(ctx); done {
- return fixes, nil
- }
-
- if err := addStdlibCandidates(p, p.missingRefs); err != nil {
- return nil, err
- }
- p.assumeSiblingImportsValid()
- if fixes, done := p.fix(); done {
- return fixes, nil
- }
-
- // Go look for candidates in $GOPATH, etc. We don't necessarily load
- // the real exports of sibling imports, so keep assuming their contents.
- if err := addExternalCandidates(ctx, p, p.missingRefs, filename); err != nil {
- return nil, err
- }
-
- p.lastTry = true
- fixes, _ := p.fix()
- return fixes, nil
-}
-
-// MaxRelevance is the highest relevance, used for the standard library.
-// Chosen arbitrarily to match pre-existing gopls code.
-const MaxRelevance = 7.0
-
-// getCandidatePkgs works with the passed callback to find all acceptable packages.
-// It deduplicates by import path, and uses a cached stdlib rather than reading
-// from disk.
-func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
- notSelf := func(p *pkg) bool {
- return p.packageName != filePkg || p.dir != filepath.Dir(filename)
- }
- goenv, err := env.goEnv()
- if err != nil {
- return err
- }
-
- var mu sync.Mutex // to guard asynchronous access to dupCheck
- dupCheck := map[string]struct{}{}
-
- // Start off with the standard library.
- for importPath, symbols := range stdlib.PackageSymbols {
- p := &pkg{
- dir: filepath.Join(goenv["GOROOT"], "src", importPath),
- importPathShort: importPath,
- packageName: path.Base(importPath),
- relevance: MaxRelevance,
- }
- dupCheck[importPath] = struct{}{}
- if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) {
- var exports []stdlib.Symbol
- for _, sym := range symbols {
- switch sym.Kind {
- case stdlib.Func, stdlib.Type, stdlib.Var, stdlib.Const:
- exports = append(exports, sym)
- }
- }
- wrappedCallback.exportsLoaded(p, exports)
- }
- }
-
- scanFilter := &scanCallback{
- rootFound: func(root gopathwalk.Root) bool {
- // Exclude goroot results -- getting them is relatively expensive, not cached,
- // and generally redundant with the in-memory version.
- return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
- },
- dirFound: wrappedCallback.dirFound,
- packageNameLoaded: func(pkg *pkg) bool {
- mu.Lock()
- defer mu.Unlock()
- if _, ok := dupCheck[pkg.importPathShort]; ok {
- return false
- }
- dupCheck[pkg.importPathShort] = struct{}{}
- return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
- },
- exportsLoaded: func(pkg *pkg, exports []stdlib.Symbol) {
- // If we're an x_test, load the package under test's test variant.
- if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
- var err error
- _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
- if err != nil {
- return
- }
- }
- wrappedCallback.exportsLoaded(pkg, exports)
- },
- }
- resolver, err := env.GetResolver()
- if err != nil {
- return err
- }
- return resolver.scan(ctx, scanFilter)
-}
-
-func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) (map[string]float64, error) {
- result := make(map[string]float64)
- resolver, err := env.GetResolver()
- if err != nil {
- return nil, err
- }
- for _, path := range paths {
- result[path] = resolver.scoreImportPath(ctx, path)
- }
- return result, nil
-}
-
-func PrimeCache(ctx context.Context, resolver Resolver) error {
- // Fully scan the disk for directories, but don't actually read any Go files.
- callback := &scanCallback{
- rootFound: func(root gopathwalk.Root) bool {
- // See getCandidatePkgs: walking GOROOT is apparently expensive and
- // unnecessary.
- return root.Type != gopathwalk.RootGOROOT
- },
- dirFound: func(pkg *pkg) bool {
- return false
- },
- // packageNameLoaded and exportsLoaded must never be called.
- }
-
- return resolver.scan(ctx, callback)
-}
-
-func candidateImportName(pkg *pkg) string {
- if ImportPathToAssumedName(pkg.importPathShort) != pkg.packageName {
- return pkg.packageName
- }
- return ""
-}
-
-// GetAllCandidates calls wrapped for each package whose name starts with
-// searchPrefix, and can be imported from filename with the package name filePkg.
-//
-// Beware that the wrapped function may be called multiple times concurrently.
-// TODO(adonovan): encapsulate the concurrency.
-func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
- callback := &scanCallback{
- rootFound: func(gopathwalk.Root) bool {
- return true
- },
- dirFound: func(pkg *pkg) bool {
- if !CanUse(filename, pkg.dir) {
- return false
- }
- // Try the assumed package name first, then a simpler path match
- // in case of packages named vN, which are not uncommon.
- return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
- strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
- },
- packageNameLoaded: func(pkg *pkg) bool {
- if !strings.HasPrefix(pkg.packageName, searchPrefix) {
- return false
- }
- wrapped(ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- Relevance: pkg.relevance,
- })
- return false
- },
- }
- return getCandidatePkgs(ctx, callback, filename, filePkg, env)
-}
-
-// GetImportPaths calls wrapped for each package whose import path starts with
-// searchPrefix, and can be imported from filename with the package name filePkg.
-func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
- callback := &scanCallback{
- rootFound: func(gopathwalk.Root) bool {
- return true
- },
- dirFound: func(pkg *pkg) bool {
- if !CanUse(filename, pkg.dir) {
- return false
- }
- return strings.HasPrefix(pkg.importPathShort, searchPrefix)
- },
- packageNameLoaded: func(pkg *pkg) bool {
- wrapped(ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- Relevance: pkg.relevance,
- })
- return false
- },
- }
- return getCandidatePkgs(ctx, callback, filename, filePkg, env)
-}
-
-// A PackageExport is a package and its exports.
-type PackageExport struct {
- Fix *ImportFix
- Exports []stdlib.Symbol
-}
-
-// GetPackageExports returns all known packages with name pkg and their exports.
-func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
- callback := &scanCallback{
- rootFound: func(gopathwalk.Root) bool {
- return true
- },
- dirFound: func(pkg *pkg) bool {
- return pkgIsCandidate(filename, References{searchPkg: nil}, pkg)
- },
- packageNameLoaded: func(pkg *pkg) bool {
- return pkg.packageName == searchPkg
- },
- exportsLoaded: func(pkg *pkg, exports []stdlib.Symbol) {
- sortSymbols(exports)
- wrapped(PackageExport{
- Fix: &ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- Relevance: pkg.relevance,
- },
- Exports: exports,
- })
- },
- }
- return getCandidatePkgs(ctx, callback, filename, filePkg, env)
-}
-
-// TODO(rfindley): we should depend on GOOS and GOARCH, to provide accurate
-// imports when doing cross-platform development.
-var requiredGoEnvVars = []string{
- "GO111MODULE",
- "GOFLAGS",
- "GOINSECURE",
- "GOMOD",
- "GOMODCACHE",
- "GONOPROXY",
- "GONOSUMDB",
- "GOPATH",
- "GOPROXY",
- "GOROOT",
- "GOSUMDB",
- "GOWORK",
-}
-
-// ProcessEnv contains environment variables and settings that affect the use of
-// the go command, the go/build package, etc.
-//
-// ...a ProcessEnv *also* overwrites its Env along with derived state in the
-// form of the resolver. And because it is lazily initialized, an env may just
-// be broken and unusable, but there is no way for the caller to detect that:
-// all queries will just fail.
-//
-// TODO(rfindley): refactor this package so that this type (perhaps renamed to
-// just Env or Config) is an immutable configuration struct, to be exchanged
-// for an initialized object via a constructor that returns an error. Perhaps
-// the signature should be `func NewResolver(*Env) (*Resolver, error)`, where
-// resolver is a concrete type used for resolving imports. Via this
-// refactoring, we can avoid the need to call ProcessEnv.init and
- // ProcessEnv.GoEnv everywhere, and implicitly fix all the places where
- // these are misused. Also, we'd delegate to the caller the decision of how to
-// handle a broken environment.
-type ProcessEnv struct {
- GocmdRunner *gocommand.Runner
-
- BuildFlags []string
- ModFlag string
-
- // SkipPathInScan returns true if the path should be skipped from scans of
- // the RootCurrentModule root type. The function argument is a clean,
- // absolute path.
- SkipPathInScan func(string) bool
-
- // Env overrides the OS environment, and can be used to specify
- // GOPROXY, GO111MODULE, etc. PATH cannot be set here, because
- // exec.Command will not honor it.
- // Specifying all of requiredGoEnvVars avoids a call to `go env`.
- Env map[string]string
-
- WorkingDir string
-
- // If Logf is non-nil, debug logging is enabled through this function.
- Logf func(format string, args ...any)
-
- // If set, ModCache holds a shared cache of directory info to use across
- // multiple ProcessEnvs.
- ModCache *DirInfoCache
-
- initialized bool // see TODO above
-
- // resolver and resolverErr are lazily evaluated (see GetResolver).
- // This is unclean, but see the big TODO in the docstring for ProcessEnv
- // above: for now, we can't be sure that the ProcessEnv is fully initialized.
- resolver Resolver
- resolverErr error
-}
-
-func (e *ProcessEnv) goEnv() (map[string]string, error) {
- if err := e.init(); err != nil {
- return nil, err
- }
- return e.Env, nil
-}
-
-func (e *ProcessEnv) matchFile(dir, name string) (bool, error) {
- bctx, err := e.buildContext()
- if err != nil {
- return false, err
- }
- return bctx.MatchFile(dir, name)
-}
-
-// CopyConfig copies the env's configuration into a new env.
-func (e *ProcessEnv) CopyConfig() *ProcessEnv {
- copy := &ProcessEnv{
- GocmdRunner: e.GocmdRunner,
- initialized: e.initialized,
- BuildFlags: e.BuildFlags,
- Logf: e.Logf,
- WorkingDir: e.WorkingDir,
- resolver: nil,
- Env: map[string]string{},
- }
- maps.Copy(copy.Env, e.Env)
- return copy
-}
-
-func (e *ProcessEnv) init() error {
- if e.initialized {
- return nil
- }
-
- foundAllRequired := true
- for _, k := range requiredGoEnvVars {
- if _, ok := e.Env[k]; !ok {
- foundAllRequired = false
- break
- }
- }
- if foundAllRequired {
- e.initialized = true
- return nil
- }
-
- if e.Env == nil {
- e.Env = map[string]string{}
- }
-
- goEnv := map[string]string{}
- stdout, err := e.invokeGo(context.TODO(), "env", append([]string{"-json"}, requiredGoEnvVars...)...)
- if err != nil {
- return err
- }
- if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil {
- return err
- }
- maps.Copy(e.Env, goEnv)
- e.initialized = true
- return nil
-}
-
-func (e *ProcessEnv) env() []string {
- var env []string // the gocommand package will prepend os.Environ.
- for k, v := range e.Env {
- env = append(env, k+"="+v)
- }
- return env
-}
-
-func (e *ProcessEnv) GetResolver() (Resolver, error) {
- if err := e.init(); err != nil {
- return nil, err
- }
-
- if e.resolver == nil && e.resolverErr == nil {
- // TODO(rfindley): we should only use a gopathResolver here if the working
- // directory is actually *in* GOPATH. (I seem to recall an open gopls issue
- // for this behavior, but I can't find it).
- //
- // For gopls, we can optionally explicitly choose a resolver type, since we
- // already know the view type.
- if e.Env["GOMOD"] == "" && (e.Env["GOWORK"] == "" || e.Env["GOWORK"] == "off") {
- e.resolver = newGopathResolver(e)
- e.logf("created gopath resolver")
- } else if r, err := newModuleResolver(e, e.ModCache); err != nil {
- e.resolverErr = err
- e.logf("failed to create module resolver: %v", err)
- } else {
- e.resolver = Resolver(r)
- e.logf("created module resolver")
- }
- }
-
- return e.resolver, e.resolverErr
-}
-
-// logf logs if e.Logf is non-nil.
-func (e *ProcessEnv) logf(format string, args ...any) {
- if e.Logf != nil {
- e.Logf(format, args...)
- }
-}
-
-// buildContext returns the build.Context to use for matching files.
-//
-// TODO(rfindley): support dynamic GOOS, GOARCH here, when doing cross-platform
-// development.
-func (e *ProcessEnv) buildContext() (*build.Context, error) {
- ctx := build.Default
- goenv, err := e.goEnv()
- if err != nil {
- return nil, err
- }
- ctx.GOROOT = goenv["GOROOT"]
- ctx.GOPATH = goenv["GOPATH"]
-
- // As of Go 1.14, build.Context has a Dir field
- // (see golang.org/issue/34860).
- // Populate it only if present.
- rc := reflect.ValueOf(&ctx).Elem()
- dir := rc.FieldByName("Dir")
- if dir.IsValid() && dir.Kind() == reflect.String {
- dir.SetString(e.WorkingDir)
- }
-
- // Since Go 1.11, go/build.Context.Import may invoke 'go list' depending on
- // the value in GO111MODULE in the process's environment. We always want to
- // run in GOPATH mode when calling Import, so we need to prevent this from
- // happening. In Go 1.16, GO111MODULE defaults to "on", so this problem comes
- // up more frequently.
- //
- // HACK: setting any of the Context I/O hooks prevents Import from invoking
- // 'go list', regardless of GO111MODULE. This is undocumented, but it's
- // unlikely to change before GOPATH support is removed.
- ctx.ReadDir = ioutil.ReadDir
-
- return &ctx, nil
-}
-
-func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) (*bytes.Buffer, error) {
- inv := gocommand.Invocation{
- Verb: verb,
- Args: args,
- BuildFlags: e.BuildFlags,
- Env: e.env(),
- Logf: e.Logf,
- WorkingDir: e.WorkingDir,
- }
- return e.GocmdRunner.Run(ctx, inv)
-}
-
-func addStdlibCandidates(pass *pass, refs References) error {
- localbase := func(nm string) string {
- ans := path.Base(nm)
- if ans[0] == 'v' {
- // this is called, for instance, with math/rand/v2 and returns rand/v2
- if _, err := strconv.Atoi(ans[1:]); err == nil {
- ix := strings.LastIndex(nm, ans)
- more := path.Base(nm[:ix])
- ans = path.Join(more, ans)
- }
- }
- return ans
- }
- add := func(pkg string) {
- // Prevent self-imports.
- if path.Base(pkg) == pass.f.Name.Name && filepath.Join(pass.goroot, "src", pkg) == pass.srcDir {
- return
- }
- exports := symbolNameSet(stdlib.PackageSymbols[pkg])
- pass.addCandidate(
- &ImportInfo{ImportPath: pkg},
- &PackageInfo{Name: localbase(pkg), Exports: exports})
- }
- for left := range refs {
- if left == "rand" {
- // Make sure we try crypto/rand before any version of math/rand as both have Int()
- // and our policy is to recommend crypto
- add("crypto/rand")
- // if the user's no later than go1.21, this should be "math/rand"
- // but we have no way of figuring out what the user is using
- // TODO: investigate using the toolchain version to disambiguate in the stdlib
- add("math/rand/v2")
- // math/rand has an overlapping API
- // TestIssue66407 fails without this
- add("math/rand")
- continue
- }
- for importPath := range stdlib.PackageSymbols {
- if path.Base(importPath) == left {
- add(importPath)
- }
- }
- }
- return nil
-}
-
-// A Resolver does the build-system-specific parts of goimports.
-type Resolver interface {
- // loadPackageNames loads the package names in importPaths.
- loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
-
- // scan works with callback to search for packages. See scanCallback for details.
- scan(ctx context.Context, callback *scanCallback) error
-
- // loadExports returns the package name and set of exported symbols in the
- // package at dir. loadExports may be called concurrently.
- loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error)
-
- // scoreImportPath returns the relevance for an import path.
- scoreImportPath(ctx context.Context, path string) float64
-
- // ClearForNewScan returns a new Resolver based on the receiver that has
- // cleared its internal caches of directory contents.
- //
- // The new resolver should be primed and then set via
- // [ProcessEnv.UpdateResolver].
- ClearForNewScan() Resolver
-}
-
-// A scanCallback controls a call to scan and receives its results.
-// In general, minor errors will be silently discarded; a user should not
-// expect to receive a full series of calls for everything.
-type scanCallback struct {
- // rootFound is called before scanning a new root dir. If it returns true,
- // the root will be scanned. Returning false will not necessarily prevent
- // directories from that root making it to dirFound.
- rootFound func(gopathwalk.Root) bool
- // dirFound is called when a directory is found that is possibly a Go package.
- // pkg will be populated with everything except packageName.
- // If it returns true, the package's name will be loaded.
- dirFound func(pkg *pkg) bool
- // packageNameLoaded is called when a package is found and its name is loaded.
- // If it returns true, the package's exports will be loaded.
- packageNameLoaded func(pkg *pkg) bool
- // exportsLoaded is called when a package's exports have been loaded.
- exportsLoaded func(pkg *pkg, exports []stdlib.Symbol)
-}
-
-func addExternalCandidates(ctx context.Context, pass *pass, refs References, filename string) error {
- ctx, done := event.Start(ctx, "imports.addExternalCandidates")
- defer done()
-
- results, err := pass.source.ResolveReferences(ctx, filename, refs)
- if err != nil {
- return err
- }
-
- for _, result := range results {
- if result == nil {
- continue
- }
- // Don't offer completions that would shadow predeclared
- // names, such as github.com/coreos/etcd/error.
- if types.Universe.Lookup(result.Package.Name) != nil { // predeclared
- // Ideally we would skip this candidate only
- // if the predeclared name is actually
- // referenced by the file, but that's a lot
- // trickier to compute and would still create
- // an import that is likely to surprise the
- // user before long.
- continue
- }
- pass.addCandidate(result.Import, result.Package)
- }
- return nil
-}
-
-// notIdentifier reports whether ch is an invalid identifier character.
-func notIdentifier(ch rune) bool {
- return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' ||
- '0' <= ch && ch <= '9' ||
- ch == '_' ||
- ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch)))
-}
-
-// ImportPathToAssumedName returns the assumed package name of an import path.
-// It does this using only string parsing of the import path.
-// It picks the last element of the path that does not look like a major
-// version, and then picks the valid identifier off the start of that element.
-// It is used to determine if a local rename should be added to an import for
-// clarity.
-// This function could be moved to a standard package and exported if we want
-// for use in other tools.
-func ImportPathToAssumedName(importPath string) string {
- base := path.Base(importPath)
- if strings.HasPrefix(base, "v") {
- if _, err := strconv.Atoi(base[1:]); err == nil {
- dir := path.Dir(importPath)
- if dir != "." {
- base = path.Base(dir)
- }
- }
- }
- base = strings.TrimPrefix(base, "go-")
- if i := strings.IndexFunc(base, notIdentifier); i >= 0 {
- base = base[:i]
- }
- return base
-}
-
-// gopathResolver implements resolver for GOPATH workspaces.
-type gopathResolver struct {
- env *ProcessEnv
- cache *DirInfoCache
- scanSema chan struct{} // scanSema prevents concurrent scans.
-}
-
-func newGopathResolver(env *ProcessEnv) *gopathResolver {
- r := &gopathResolver{
- env: env,
- cache: NewDirInfoCache(),
- scanSema: make(chan struct{}, 1),
- }
- r.scanSema <- struct{}{}
- return r
-}
-
-func (r *gopathResolver) ClearForNewScan() Resolver {
- return newGopathResolver(r.env)
-}
-
-func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
- names := map[string]string{}
- bctx, err := r.env.buildContext()
- if err != nil {
- return nil, err
- }
- for _, path := range importPaths {
- names[path] = importPathToName(bctx, path, srcDir)
- }
- return names, nil
-}
-
-// importPathToName finds out the actual package name, as declared in its .go files.
-func importPathToName(bctx *build.Context, importPath, srcDir string) string {
- // Fast path for standard library without going to disk.
- if stdlib.HasPackage(importPath) {
- return path.Base(importPath) // stdlib packages always match their paths.
- }
-
- buildPkg, err := bctx.Import(importPath, srcDir, build.FindOnly)
- if err != nil {
- return ""
- }
- pkgName, err := packageDirToName(buildPkg.Dir)
- if err != nil {
- return ""
- }
- return pkgName
-}
-
-// packageDirToName is a faster version of build.Import if
-// the only thing desired is the package name. Given a directory,
-// packageDirToName then only parses one file in the package,
-// trusting that the files in the directory are consistent.
-func packageDirToName(dir string) (packageName string, err error) {
- d, err := os.Open(dir)
- if err != nil {
- return "", err
- }
- names, err := d.Readdirnames(-1)
- d.Close()
- if err != nil {
- return "", err
- }
- sort.Strings(names) // to have predictable behavior
- var lastErr error
- var nfile int
- for _, name := range names {
- if !strings.HasSuffix(name, ".go") {
- continue
- }
- if strings.HasSuffix(name, "_test.go") {
- continue
- }
- nfile++
- fullFile := filepath.Join(dir, name)
-
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
- if err != nil {
- lastErr = err
- continue
- }
- pkgName := f.Name.Name
- if pkgName == "documentation" {
- // Special case from go/build.ImportDir, not
- // handled by ctx.MatchFile.
- continue
- }
- if pkgName == "main" {
- // Also skip package main, assuming it's a +build ignore generator or example.
- // Since you can't import a package main anyway, there's no harm here.
- continue
- }
- return pkgName, nil
- }
- if lastErr != nil {
- return "", lastErr
- }
- return "", fmt.Errorf("no importable package found in %d Go files", nfile)
-}
-
-type pkg struct {
- dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
- importPathShort string // vendorless import path ("net/http", "a/b")
- packageName string // package name loaded from source if requested
- relevance float64 // a weakly-defined score of how relevant a package is. 0 is most relevant.
-}
-
-type pkgDistance struct {
- pkg *pkg
- distance int // relative distance to target
-}
-
-// byDistanceOrImportPathShortLength sorts by relative distance breaking ties
-// on the short import path length and then the import string itself.
-type byDistanceOrImportPathShortLength []pkgDistance
-
-func (s byDistanceOrImportPathShortLength) Len() int { return len(s) }
-func (s byDistanceOrImportPathShortLength) Less(i, j int) bool {
- di, dj := s[i].distance, s[j].distance
- if di == -1 {
- return false
- }
- if dj == -1 {
- return true
- }
- if di != dj {
- return di < dj
- }
-
- vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort
- if len(vi) != len(vj) {
- return len(vi) < len(vj)
- }
- return vi < vj
-}
-func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-
-func distance(basepath, targetpath string) int {
- p, err := filepath.Rel(basepath, targetpath)
- if err != nil {
- return -1
- }
- if p == "." {
- return 0
- }
- return strings.Count(p, string(filepath.Separator)) + 1
-}
-
-func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
- add := func(root gopathwalk.Root, dir string) {
- // We assume cached directories have not changed. We can skip them and their
- // children.
- if _, ok := r.cache.Load(dir); ok {
- return
- }
-
- importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):])
- info := directoryPackageInfo{
- status: directoryScanned,
- dir: dir,
- rootType: root.Type,
- nonCanonicalImportPath: VendorlessPath(importpath),
- }
- r.cache.Store(dir, info)
- }
- processDir := func(info directoryPackageInfo) {
- // Skip this directory if we were not able to get the package information successfully.
- if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- return
- }
-
- p := &pkg{
- importPathShort: info.nonCanonicalImportPath,
- dir: info.dir,
- relevance: MaxRelevance - 1,
- }
- if info.rootType == gopathwalk.RootGOROOT {
- p.relevance = MaxRelevance
- }
-
- if !callback.dirFound(p) {
- return
- }
- var err error
- p.packageName, err = r.cache.CachePackageName(info)
- if err != nil {
- return
- }
-
- if !callback.packageNameLoaded(p) {
- return
- }
- if _, exports, err := r.loadExports(ctx, p, false); err == nil {
- callback.exportsLoaded(p, exports)
- }
- }
- stop := r.cache.ScanAndListen(ctx, processDir)
- defer stop()
-
- goenv, err := r.env.goEnv()
- if err != nil {
- return err
- }
- var roots []gopathwalk.Root
- roots = append(roots, gopathwalk.Root{Path: filepath.Join(goenv["GOROOT"], "src"), Type: gopathwalk.RootGOROOT})
- for _, p := range filepath.SplitList(goenv["GOPATH"]) {
- roots = append(roots, gopathwalk.Root{Path: filepath.Join(p, "src"), Type: gopathwalk.RootGOPATH})
- }
- // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
- roots = filterRoots(roots, callback.rootFound)
- // We can't cancel walks, because we need them to finish to have a usable
- // cache. Instead, run them in a separate goroutine and detach.
- scanDone := make(chan struct{})
- go func() {
- select {
- case <-ctx.Done():
- return
- case <-r.scanSema:
- }
- defer func() { r.scanSema <- struct{}{} }()
- gopathwalk.Walk(roots, add, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: false})
- close(scanDone)
- }()
- select {
- case <-ctx.Done():
- case <-scanDone:
- }
- return nil
-}
-
-func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 {
- if stdlib.HasPackage(path) {
- return MaxRelevance
- }
- return MaxRelevance - 1
-}
-
-func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
- var result []gopathwalk.Root
- for _, root := range roots {
- if !include(root) {
- continue
- }
- result = append(result, root)
- }
- return result
-}
-
-func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) {
- if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
- return r.cache.CacheExports(ctx, r.env, info)
- }
- return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
-}
-
-// VendorlessPath returns the devendorized version of the import path ipath.
-// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
-func VendorlessPath(ipath string) string {
- // Devendorize for use in import statement.
- if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
- return ipath[i+len("/vendor/"):]
- }
- if strings.HasPrefix(ipath, "vendor/") {
- return ipath[len("vendor/"):]
- }
- return ipath
-}
-
-func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []stdlib.Symbol, error) {
- // Look for non-test, buildable .go files which could provide exports.
- all, err := os.ReadDir(dir)
- if err != nil {
- return "", nil, err
- }
- var files []fs.DirEntry
- for _, fi := range all {
- name := fi.Name()
- if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
- continue
- }
- match, err := env.matchFile(dir, fi.Name())
- if err != nil || !match {
- continue
- }
- files = append(files, fi)
- }
-
- if len(files) == 0 {
- return "", nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir)
- }
-
- var pkgName string
- var exports []stdlib.Symbol
- fset := token.NewFileSet()
- for _, fi := range files {
- select {
- case <-ctx.Done():
- return "", nil, ctx.Err()
- default:
- }
-
- fullFile := filepath.Join(dir, fi.Name())
- // Legacy ast.Object resolution is needed here.
- f, err := parser.ParseFile(fset, fullFile, nil, 0)
- if err != nil {
- env.logf("error parsing %v: %v", fullFile, err)
- continue
- }
- if f.Name.Name == "documentation" {
- // Special case from go/build.ImportDir, not
- // handled by MatchFile above.
- continue
- }
- if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
- // x_test package. We want internal test files only.
- continue
- }
- pkgName = f.Name.Name
- for name, obj := range f.Scope.Objects {
- if ast.IsExported(name) {
- var kind stdlib.Kind
- switch obj.Kind {
- case ast.Con:
- kind = stdlib.Const
- case ast.Typ:
- kind = stdlib.Type
- case ast.Var:
- kind = stdlib.Var
- case ast.Fun:
- kind = stdlib.Func
- }
- exports = append(exports, stdlib.Symbol{
- Name: name,
- Kind: kind,
- Version: 0, // unknown; be permissive
- })
- }
- }
- }
- sortSymbols(exports)
-
- env.logf("loaded exports in dir %v (package %v): %v", dir, pkgName, exports)
- return pkgName, exports, nil
-}
-
-func sortSymbols(syms []stdlib.Symbol) {
- sort.Slice(syms, func(i, j int) bool {
- return syms[i].Name < syms[j].Name
- })
-}
-
-// A symbolSearcher searches for a package with a set of symbols, among a set
-// of candidates. See [symbolSearcher.search].
-//
-// The search occurs within the scope of a single file, with context captured
-// in srcDir and xtest.
-type symbolSearcher struct {
- logf func(string, ...any)
- srcDir string // directory containing the file
- xtest bool // if set, the file being processed is an x_test file
- loadExports func(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error)
-}
-
-// search searches the provided candidates for a package containing all
-// exported symbols.
-//
-// If successful, returns the resulting package.
-func (s *symbolSearcher) search(ctx context.Context, candidates []pkgDistance, pkgName string, symbols map[string]bool) (*pkg, error) {
- // Sort the candidates by their import package length,
- // assuming that shorter package names are better than long
- // ones. Note that this sorts by the de-vendored name, so
- // there's no "penalty" for vendoring.
- sort.Sort(byDistanceOrImportPathShortLength(candidates))
- if s.logf != nil {
- for i, c := range candidates {
- s.logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
- }
- }
-
- // Arrange rescv so that we can await results in order of relevance
- // and exit as soon as we find the first match.
- //
- // Search with bounded concurrency, returning as soon as the first result
- // among rescv is non-nil.
- rescv := make([]chan *pkg, len(candidates))
- for i := range candidates {
- rescv[i] = make(chan *pkg, 1)
- }
- const maxConcurrentPackageImport = 4
- loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
-
- // Ensure that all work is completed at exit.
- ctx, cancel := context.WithCancel(ctx)
- var wg sync.WaitGroup
- defer func() {
- cancel()
- wg.Wait()
- }()
-
- // Start the search.
- wg.Add(1)
- go func() {
- defer wg.Done()
- for i, c := range candidates {
- select {
- case loadExportsSem <- struct{}{}:
- case <-ctx.Done():
- return
- }
-
- i := i
- c := c
- wg.Add(1)
- go func() {
- defer func() {
- <-loadExportsSem
- wg.Done()
- }()
- if s.logf != nil {
- s.logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
- }
- pkg, err := s.searchOne(ctx, c, symbols)
- if err != nil {
- if s.logf != nil && ctx.Err() == nil {
- s.logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
- }
- pkg = nil
- }
- rescv[i] <- pkg // may be nil
- }()
- }
- }()
-
- // Await the first (best) result.
- for _, resc := range rescv {
- select {
- case r := <-resc:
- if r != nil {
- return r, nil
- }
- case <-ctx.Done():
- return nil, ctx.Err()
- }
- }
- return nil, nil
-}
-
-func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols map[string]bool) (*pkg, error) {
- if ctx.Err() != nil {
- return nil, ctx.Err()
- }
- // If we're considering the package under test from an x_test, load the
- // test variant.
- includeTest := s.xtest && c.pkg.dir == s.srcDir
- _, exports, err := s.loadExports(ctx, c.pkg, includeTest)
- if err != nil {
- return nil, err
- }
-
- exportsMap := make(map[string]bool, len(exports))
- for _, sym := range exports {
- exportsMap[sym.Name] = true
- }
- for symbol := range symbols {
- if !exportsMap[symbol] {
- return nil, nil // no match
- }
- }
- return c.pkg, nil
-}
-
- // pkgIsCandidate reports whether pkg is a candidate for satisfying a
- // reference to package pkgIdent in the file named by filename.
-//
-// This check is purely lexical and is meant to be as fast as possible
-// because it's run over all $GOPATH directories to filter out poor
-// candidates in order to limit the CPU and I/O later parsing the
-// exports in candidate packages.
-//
-// filename is the file being formatted.
-// pkgIdent is the package being searched for, like "client" (if
-// searching for "client.New")
-func pkgIsCandidate(filename string, refs References, pkg *pkg) bool {
- // Check "internal" and "vendor" visibility:
- if !CanUse(filename, pkg.dir) {
- return false
- }
-
- // Speed optimization to minimize disk I/O:
- //
- // Use the matchesPath heuristic to filter to package paths that could
- // reasonably match a dangling reference.
- //
- // This permits mismatch naming like directory "go-foo" being package "foo",
- // or "pkg.v3" being "pkg", or directory
- // "google.golang.org/api/cloudbilling/v1" being package "cloudbilling", but
- // doesn't permit a directory "foo" to be package "bar", which is strongly
- // discouraged anyway. There's no reason goimports needs to be slow just to
- // accommodate that.
- for pkgIdent := range refs {
- if matchesPath(pkgIdent, pkg.importPathShort) {
- return true
- }
- }
- return false
-}
-
-// CanUse reports whether the package in dir is usable from filename,
-// respecting the Go "internal" and "vendor" visibility rules.
-func CanUse(filename, dir string) bool {
- // Fast path check, before any allocations. If it doesn't contain vendor
- // or internal, it's not tricky:
- // Note that this can false-negative on directories like "notinternal",
- // but we check it correctly below. This is just a fast path.
- if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
- return true
- }
-
- dirSlash := filepath.ToSlash(dir)
- if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") {
- return true
- }
- // Vendor or internal directory only visible from children of parent.
- // That means the path from the current directory to the target directory
- // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
- // or bar/vendor or bar/internal.
- // After stripping all the leading ../, the only okay place to see vendor or internal
- // is at the very beginning of the path.
- absfile, err := filepath.Abs(filename)
- if err != nil {
- return false
- }
- absdir, err := filepath.Abs(dir)
- if err != nil {
- return false
- }
- rel, err := filepath.Rel(absfile, absdir)
- if err != nil {
- return false
- }
- relSlash := filepath.ToSlash(rel)
- if i := strings.LastIndex(relSlash, "../"); i >= 0 {
- relSlash = relSlash[i+len("../"):]
- }
- return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
-}
-
-// matchesPath reports whether ident may match a potential package name
-// referred to by path, using heuristics to filter out unidiomatic package
-// names.
-//
-// Specifically, it checks whether either of the last two '/'- or '\'-delimited
-// path segments matches the identifier. The segment-matching heuristic must
-// allow for various conventions around segment naming, including go-foo,
-// foo-go, and foo.v3. To handle all of these, matching considers both (1) the
-// entire segment, ignoring '-' and '.', as well as (2) the last subsegment
-// separated by '-' or '.'. So the segment foo-go matches all of the following
-// identifiers: foo, go, and foogo. All matches are case insensitive (for ASCII
-// identifiers).
-//
-// See the docstring for [pkgIsCandidate] for an explanation of how this
-// heuristic filters potential candidate packages.
-func matchesPath(ident, path string) bool {
- // Ignore case, for ASCII.
- lowerIfASCII := func(b byte) byte {
- if 'A' <= b && b <= 'Z' {
- return b + ('a' - 'A')
- }
- return b
- }
-
- // match reports whether path[start:end] matches ident, ignoring [.-].
- match := func(start, end int) bool {
- ii := len(ident) - 1 // current byte in ident
- pi := end - 1 // current byte in path
- for ; pi >= start && ii >= 0; pi-- {
- pb := path[pi]
- if pb == '-' || pb == '.' {
- continue
- }
- pb = lowerIfASCII(pb)
- ib := lowerIfASCII(ident[ii])
- if pb != ib {
- return false
- }
- ii--
- }
- return ii < 0 && pi < start // all bytes matched
- }
-
- // segmentEnd and subsegmentEnd hold the end points of the current segment
- // and subsegment intervals.
- segmentEnd := len(path)
- subsegmentEnd := len(path)
-
- // Count slashes; we only care about the last two segments.
- nslash := 0
-
- for i := len(path) - 1; i >= 0; i-- {
- switch b := path[i]; b {
-		// TODO(rfindley): we handle backslashes here only because the previous
- // heuristic handled backslashes. This is perhaps overly defensive, but is
- // the result of many lessons regarding Chesterton's fence and the
- // goimports codebase.
- //
- // However, this function is only ever called with something called an
- // 'importPath'. Is it possible that this is a real import path, and
- // therefore we need only consider forward slashes?
- case '/', '\\':
- if match(i+1, segmentEnd) || match(i+1, subsegmentEnd) {
- return true
- }
- nslash++
- if nslash == 2 {
- return false // did not match above
- }
- segmentEnd, subsegmentEnd = i, i // reset
- case '-', '.':
- if match(i+1, subsegmentEnd) {
- return true
- }
- subsegmentEnd = i
- }
- }
- return match(0, segmentEnd) || match(0, subsegmentEnd)
-}
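A few illustrative calls following the doc comment above (a within-package sketch with made-up import paths; matchesPath is unexported):

	func exampleMatchesPath() {
		_ = matchesPath("foo", "github.com/user/go-foo")   // true: last '-'-separated subsegment
		_ = matchesPath("gofoo", "github.com/user/go-foo") // true: whole segment, ignoring '-'
		_ = matchesPath("user", "github.com/user/go-foo")  // true: second-to-last segment
		_ = matchesPath("bar", "github.com/user/go-foo")   // false: no segment matches
	}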
-
-type visitFn func(node ast.Node) ast.Visitor
-
-func (fn visitFn) Visit(node ast.Node) ast.Visitor {
- return fn(node)
-}
-
-func symbolNameSet(symbols []stdlib.Symbol) map[string]bool {
- names := make(map[string]bool)
- for _, sym := range symbols {
- switch sym.Kind {
- case stdlib.Const, stdlib.Var, stdlib.Type, stdlib.Func:
- names[sym.Name] = true
- }
- }
- return names
-}
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
deleted file mode 100644
index b5f5218b5..000000000
--- a/vendor/golang.org/x/tools/internal/imports/imports.go
+++ /dev/null
@@ -1,359 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package imports implements a Go pretty-printer (like package "go/format")
-// that also adds or removes import statements as necessary.
-package imports
-
-import (
- "bufio"
- "bytes"
- "context"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/printer"
- "go/token"
- "io"
- "regexp"
- "strconv"
- "strings"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
-)
-
-// Options is golang.org/x/tools/imports.Options with extra internal-only options.
-type Options struct {
- Env *ProcessEnv // The environment to use. Note: this contains the cached module and filesystem state.
-
- // LocalPrefix is a comma-separated string of import path prefixes, which, if
- // set, instructs Process to sort the import paths with the given prefixes
- // into another group after 3rd-party packages.
- LocalPrefix string
-
- Fragment bool // Accept fragment of a source file (no package statement)
- AllErrors bool // Report all errors (not just the first 10 on different lines)
-
- Comments bool // Print comments (true if nil *Options provided)
- TabIndent bool // Use tabs for indent (true if nil *Options provided)
- TabWidth int // Tab width (8 if nil *Options provided)
-
- FormatOnly bool // Disable the insertion and deletion of imports
-}
-
-// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env.
-func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) {
- fileSet := token.NewFileSet()
- var parserMode parser.Mode
- if opt.Comments {
- parserMode |= parser.ParseComments
- }
- if opt.AllErrors {
- parserMode |= parser.AllErrors
- }
- file, adjust, err := parse(fileSet, filename, src, parserMode, opt.Fragment)
- if err != nil {
- return nil, err
- }
-
- if !opt.FormatOnly {
- if err := fixImports(fileSet, file, filename, opt.Env); err != nil {
- return nil, err
- }
- }
- return formatFile(fileSet, file, src, adjust, opt)
-}
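As a rough sketch of the call shape (demoProcess and the source text are made up; this is an internal package, so the snippet assumes package-internal or forked use): with FormatOnly set, Process only merges, sorts, and formats the existing imports, so no ProcessEnv is consulted.

	func demoProcess() ([]byte, error) {
		src := []byte("package demo\nimport \"os\"\nimport \"fmt\"\nfunc main() { fmt.Println(os.Args) }\n")
		return Process("demo.go", src, &Options{
			Comments:   true,
			TabIndent:  true,
			TabWidth:   8,
			FormatOnly: true, // skip import fixing; just merge, sort, and gofmt the imports
		})
	}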
-
-// FixImports returns a list of fixes to the imports that, when applied,
-// will leave the imports in the same state as Process. src must be
-// specified.
-//
-// Note that filename's directory influences which imports can be chosen,
-// so it is important that filename be accurate.
-func FixImports(ctx context.Context, filename string, src []byte, goroot string, logf func(string, ...any), source Source) (fixes []*ImportFix, err error) {
- ctx, done := event.Start(ctx, "imports.FixImports")
- defer done()
-
- fileSet := token.NewFileSet()
- // TODO(rfindley): these default values for ParseComments and AllErrors were
- // extracted from gopls, but are they even needed?
- file, _, err := parse(fileSet, filename, src, parser.ParseComments|parser.AllErrors, true)
- if err != nil {
- return nil, err
- }
-
- return getFixesWithSource(ctx, fileSet, file, filename, goroot, logf, source)
-}
-
-// ApplyFixes applies all of the fixes to the file and formats it. extraMode
-// is added in when parsing the file. src and opts must be specified, but no
-// env is needed.
-func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) {
- // Don't use parse() -- we don't care about fragments or statement lists
- // here, and we need to work with unparsable files.
- fileSet := token.NewFileSet()
- parserMode := parser.SkipObjectResolution
- if opt.Comments {
- parserMode |= parser.ParseComments
- }
- if opt.AllErrors {
- parserMode |= parser.AllErrors
- }
- parserMode |= extraMode
-
- file, err := parser.ParseFile(fileSet, filename, src, parserMode)
- if file == nil {
- return nil, err
- }
-
- // Apply the fixes to the file.
- apply(fileSet, file, fixes)
-
- return formatFile(fileSet, file, src, nil, opt)
-}
-
-// formatFile formats the file syntax tree.
-// It may mutate the token.FileSet and the ast.File.
-//
-// If an adjust function is provided, it is called after formatting
-// with the original source (formatFile's src parameter) and the
-// formatted file, and returns the postprocessed result.
-func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
- mergeImports(file)
- sortImports(opt.LocalPrefix, fset.File(file.FileStart), file)
- var spacesBefore []string // import paths we need spaces before
- for _, impSection := range astutil.Imports(fset, file) {
- // Within each block of contiguous imports, see if any
- // import lines are in different group numbers. If so,
- // we'll need to put a space between them so it's
- // compatible with gofmt.
- lastGroup := -1
- for _, importSpec := range impSection {
- importPath, _ := strconv.Unquote(importSpec.Path.Value)
- groupNum := importGroup(opt.LocalPrefix, importPath)
- if groupNum != lastGroup && lastGroup != -1 {
- spacesBefore = append(spacesBefore, importPath)
- }
- lastGroup = groupNum
- }
-
- }
-
- printerMode := printer.UseSpaces
- if opt.TabIndent {
- printerMode |= printer.TabIndent
- }
- printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
-
- var buf bytes.Buffer
- err := printConfig.Fprint(&buf, fset, file)
- if err != nil {
- return nil, err
- }
- out := buf.Bytes()
- if adjust != nil {
- out = adjust(src, out)
- }
- if len(spacesBefore) > 0 {
- out, err = addImportSpaces(bytes.NewReader(out), spacesBefore)
- if err != nil {
- return nil, err
- }
- }
-
- out, err = format.Source(out)
- if err != nil {
- return nil, err
- }
- return out, nil
-}
-
-// parse parses src, which was read from filename,
-// as a Go source file or statement list.
-func parse(fset *token.FileSet, filename string, src []byte, parserMode parser.Mode, fragment bool) (*ast.File, func(orig, src []byte) []byte, error) {
- if parserMode&parser.SkipObjectResolution != 0 {
- panic("legacy ast.Object resolution is required")
- }
-
- // Try as whole source file.
- file, err := parser.ParseFile(fset, filename, src, parserMode)
- if err == nil {
- return file, nil, nil
- }
- // If the error is that the source file didn't begin with a
- // package line and we accept fragmented input, fall through to
- // try as a source fragment. Stop and return on any other error.
- if !fragment || !strings.Contains(err.Error(), "expected 'package'") {
- return nil, nil, err
- }
-
- // If this is a declaration list, make it a source file
- // by inserting a package clause.
- // Insert using a ;, not a newline, so that parse errors are on
- // the correct line.
- const prefix = "package main;"
- psrc := append([]byte(prefix), src...)
- file, err = parser.ParseFile(fset, filename, psrc, parserMode)
- if err == nil {
- // Gofmt will turn the ; into a \n.
- // Do that ourselves now and update the file contents,
- // so that positions and line numbers are correct going forward.
- psrc[len(prefix)-1] = '\n'
- fset.File(file.Package).SetLinesForContent(psrc)
-
- // If a main function exists, we will assume this is a main
- // package and leave the file.
- if containsMainFunc(file) {
- return file, nil, nil
- }
-
- adjust := func(orig, src []byte) []byte {
- // Remove the package clause.
- src = src[len(prefix):]
- return matchSpace(orig, src)
- }
- return file, adjust, nil
- }
- // If the error is that the source file didn't begin with a
- // declaration, fall through to try as a statement list.
- // Stop and return on any other error.
- if !strings.Contains(err.Error(), "expected declaration") {
- return nil, nil, err
- }
-
- // If this is a statement list, make it a source file
- // by inserting a package clause and turning the list
- // into a function body. This handles expressions too.
- // Insert using a ;, not a newline, so that the line numbers
- // in fsrc match the ones in src.
- fsrc := append(append([]byte("package p; func _() {"), src...), '}')
- file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
- if err == nil {
- adjust := func(orig, src []byte) []byte {
- // Remove the wrapping.
- // Gofmt has turned the ; into a \n\n.
- src = src[len("package p\n\nfunc _() {"):]
- src = src[:len(src)-len("}\n")]
- // Gofmt has also indented the function body one level.
- // Remove that indent.
- src = bytes.ReplaceAll(src, []byte("\n\t"), []byte("\n"))
- return matchSpace(orig, src)
- }
- return file, adjust, nil
- }
-
- // Failed, and out of options.
- return nil, nil, err
-}
-
-// containsMainFunc checks if a file contains a function declaration with the
-// function signature 'func main()'
-func containsMainFunc(file *ast.File) bool {
- for _, decl := range file.Decls {
- if f, ok := decl.(*ast.FuncDecl); ok {
- if f.Name.Name != "main" {
- continue
- }
-
- if len(f.Type.Params.List) != 0 {
- continue
- }
-
- if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
- continue
- }
-
- return true
- }
- }
-
- return false
-}
-
-func cutSpace(b []byte) (before, middle, after []byte) {
- i := 0
- for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') {
- i++
- }
- j := len(b)
- for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') {
- j--
- }
- if i <= j {
- return b[:i], b[i:j], b[j:]
- }
- return nil, nil, b[j:]
-}
-
-// matchSpace reformats src to use the same space context as orig.
-// 1. If orig begins with blank lines, matchSpace inserts them at the beginning of src.
-// 2. matchSpace copies the indentation of the first non-blank line in orig
-// to every non-blank line in src.
-// 3. matchSpace copies the trailing space from orig and uses it in place
-// of src's trailing space.
-func matchSpace(orig []byte, src []byte) []byte {
- before, _, after := cutSpace(orig)
- i := bytes.LastIndex(before, []byte{'\n'})
- before, indent := before[:i+1], before[i+1:]
-
- _, src, _ = cutSpace(src)
-
- var b bytes.Buffer
- b.Write(before)
- for len(src) > 0 {
- line := src
- if i := bytes.IndexByte(line, '\n'); i >= 0 {
- line, src = line[:i+1], line[i+1:]
- } else {
- src = nil
- }
- if len(line) > 0 && line[0] != '\n' { // not blank
- b.Write(indent)
- }
- b.Write(line)
- }
- b.Write(after)
- return b.Bytes()
-}
-
-var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+?)"`)
-
-func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
- var out bytes.Buffer
- in := bufio.NewReader(r)
- inImports := false
- done := false
- for {
- s, err := in.ReadString('\n')
- if err == io.EOF {
- break
- } else if err != nil {
- return nil, err
- }
-
- if !inImports && !done && strings.HasPrefix(s, "import") {
- inImports = true
- }
- if inImports && (strings.HasPrefix(s, "var") ||
- strings.HasPrefix(s, "func") ||
- strings.HasPrefix(s, "const") ||
- strings.HasPrefix(s, "type")) {
- done = true
- inImports = false
- }
- if inImports && len(breaks) > 0 {
- if m := impLine.FindStringSubmatch(s); m != nil {
- if m[1] == breaks[0] {
- out.WriteByte('\n')
- breaks = breaks[1:]
- }
- }
- }
-
- fmt.Fprint(&out, s)
- }
- return out.Bytes(), nil
-}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go
deleted file mode 100644
index df94ec818..000000000
--- a/vendor/golang.org/x/tools/internal/imports/mod.go
+++ /dev/null
@@ -1,841 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package imports
-
-import (
- "bytes"
- "context"
- "encoding/json"
- "fmt"
- "os"
- "path"
- "path/filepath"
- "regexp"
- "slices"
- "sort"
- "strconv"
- "strings"
-
- "golang.org/x/mod/module"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/gocommand"
- "golang.org/x/tools/internal/gopathwalk"
- "golang.org/x/tools/internal/stdlib"
-)
-
-// Notes(rfindley): ModuleResolver appears to be heavily optimized for scanning
-// as fast as possible, which is desirable for a call to goimports from the
-// command line, but it doesn't work as well for gopls, where it suffers from
-// slow startup (golang/go#44863) and intermittent hanging (golang/go#59216),
-// both caused by populating the cache, albeit in slightly different ways.
-//
-// A high level list of TODOs:
-//  - Optimize the scan itself, as there is some redundant statting and
-//    reading of go.mod files.
-// - Invert the relationship between ProcessEnv and Resolver (see the
-// docstring of ProcessEnv).
-// - Make it easier to use an external resolver implementation.
-//
-// Smaller TODOs are annotated in the code below.
-
-// ModuleResolver implements the Resolver interface for a workspace using
-// modules.
-//
-// A goal of the ModuleResolver is to invoke the Go command as little as
-// possible. To this end, it runs the Go command only for listing module
-// information (i.e. `go list -m -e -json ...`). Package scanning, the process
-// of loading package information for the modules, is implemented internally
-// via the scan method.
-//
-// It has two types of state: the state derived from the go command, which
-// is populated by init, and the state derived from scans, which is populated
-// via scan. A root is considered scanned if it has been walked to discover
-// directories. However, if the scan did not require additional information
-// from the directory (such as package name or exports), the directory
-// information itself may be partially populated. It will be lazily filled in
-// as needed by scans, using the scanCallback.
-type ModuleResolver struct {
- env *ProcessEnv
-
- // Module state, populated during construction
- dummyVendorMod *gocommand.ModuleJSON // if vendoring is enabled, a pseudo-module to represent the /vendor directory
- moduleCacheDir string // GOMODCACHE, inferred from GOPATH if unset
- roots []gopathwalk.Root // roots to scan, in approximate order of importance
- mains []*gocommand.ModuleJSON // main modules
- mainByDir map[string]*gocommand.ModuleJSON // module information by dir, to join with roots
- modsByModPath []*gocommand.ModuleJSON // all modules, ordered by # of path components in their module path
- modsByDir []*gocommand.ModuleJSON // ...or by the number of path components in their Dir.
-
- // Scanning state, populated by scan
-
- // scanSema prevents concurrent scans, and guards scannedRoots and the cache
- // fields below (though the caches themselves are concurrency safe).
- // Receive to acquire, send to release.
- scanSema chan struct{}
- scannedRoots map[gopathwalk.Root]bool // if true, root has been walked
-
- // Caches of directory info, populated by scans and scan callbacks
- //
- // moduleCacheCache stores cached information about roots in the module
- // cache, which are immutable and therefore do not need to be invalidated.
- //
- // otherCache stores information about all other roots (even GOROOT), which
- // may change.
- moduleCacheCache *DirInfoCache
- otherCache *DirInfoCache
-}
-
-// newModuleResolver returns a new module-aware goimports resolver.
-//
-// Note: use caution when modifying this constructor: changes must also be
-// reflected in ModuleResolver.ClearForNewScan.
-func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleResolver, error) {
- r := &ModuleResolver{
- env: e,
- scanSema: make(chan struct{}, 1),
- }
- r.scanSema <- struct{}{} // release
-
- goenv, err := r.env.goEnv()
- if err != nil {
- return nil, err
- }
-
- // TODO(rfindley): can we refactor to share logic with r.env.invokeGo?
- inv := gocommand.Invocation{
- BuildFlags: r.env.BuildFlags,
- ModFlag: r.env.ModFlag,
- Env: r.env.env(),
- Logf: r.env.Logf,
- WorkingDir: r.env.WorkingDir,
- }
-
- vendorEnabled := false
- var mainModVendor *gocommand.ModuleJSON // for module vendoring
- var mainModsVendor []*gocommand.ModuleJSON // for workspace vendoring
-
- goWork := r.env.Env["GOWORK"]
- if len(goWork) == 0 {
- // TODO(rfindley): VendorEnabled runs the go command to get GOFLAGS, but
- // they should be available from the ProcessEnv. Can we avoid the redundant
- // invocation?
- vendorEnabled, mainModVendor, err = gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
- if err != nil {
- return nil, err
- }
- } else {
- vendorEnabled, mainModsVendor, err = gocommand.WorkspaceVendorEnabled(context.Background(), inv, r.env.GocmdRunner)
- if err != nil {
- return nil, err
- }
- }
-
- if vendorEnabled {
- if mainModVendor != nil {
- // Module vendor mode is on, so all the non-Main modules are irrelevant,
- // and we need to search /vendor for everything.
- r.mains = []*gocommand.ModuleJSON{mainModVendor}
- r.dummyVendorMod = &gocommand.ModuleJSON{
- Path: "",
- Dir: filepath.Join(mainModVendor.Dir, "vendor"),
- }
- r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
- r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
- } else {
- // Workspace vendor mode is on, so all the non-Main modules are irrelevant,
- // and we need to search /vendor for everything.
- r.mains = mainModsVendor
- r.dummyVendorMod = &gocommand.ModuleJSON{
- Path: "",
- Dir: filepath.Join(filepath.Dir(goWork), "vendor"),
- }
- r.modsByModPath = append(slices.Clone(mainModsVendor), r.dummyVendorMod)
- r.modsByDir = append(slices.Clone(mainModsVendor), r.dummyVendorMod)
- }
- } else {
- // Vendor mode is off, so run go list -m ... to find everything.
- err := r.initAllMods()
- // We expect an error when running outside of a module with
- // GO111MODULE=on. Other errors are fatal.
- if err != nil {
- if errMsg := err.Error(); !strings.Contains(errMsg, "working directory is not part of a module") && !strings.Contains(errMsg, "go.mod file not found") {
- return nil, err
- }
- }
- }
-
- r.moduleCacheDir = gomodcacheForEnv(goenv)
- if r.moduleCacheDir == "" {
- return nil, fmt.Errorf("cannot resolve GOMODCACHE")
- }
-
- sort.Slice(r.modsByModPath, func(i, j int) bool {
- count := func(x int) int {
- return strings.Count(r.modsByModPath[x].Path, "/")
- }
- return count(j) < count(i) // descending order
- })
- sort.Slice(r.modsByDir, func(i, j int) bool {
- count := func(x int) int {
- return strings.Count(r.modsByDir[x].Dir, string(filepath.Separator))
- }
- return count(j) < count(i) // descending order
- })
-
- r.roots = []gopathwalk.Root{}
- if goenv["GOROOT"] != "" { // "" happens in tests
- r.roots = append(r.roots, gopathwalk.Root{Path: filepath.Join(goenv["GOROOT"], "/src"), Type: gopathwalk.RootGOROOT})
- }
- r.mainByDir = make(map[string]*gocommand.ModuleJSON)
- for _, main := range r.mains {
- r.roots = append(r.roots, gopathwalk.Root{Path: main.Dir, Type: gopathwalk.RootCurrentModule})
- r.mainByDir[main.Dir] = main
- }
- if vendorEnabled {
- r.roots = append(r.roots, gopathwalk.Root{Path: r.dummyVendorMod.Dir, Type: gopathwalk.RootOther})
- } else {
- addDep := func(mod *gocommand.ModuleJSON) {
- if mod.Replace == nil {
- // This is redundant with the cache, but we'll skip it cheaply enough
- // when we encounter it in the module cache scan.
- //
- // Including it at a lower index in r.roots than the module cache dir
- // helps prioritize matches from within existing dependencies.
- r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootModuleCache})
- } else {
- r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootOther})
- }
- }
- // Walk dependent modules before scanning the full mod cache, direct deps first.
- for _, mod := range r.modsByModPath {
- if !mod.Indirect && !mod.Main {
- addDep(mod)
- }
- }
- for _, mod := range r.modsByModPath {
- if mod.Indirect && !mod.Main {
- addDep(mod)
- }
- }
- // If provided, share the moduleCacheCache.
- //
- // TODO(rfindley): The module cache is immutable. However, the loaded
- // exports do depend on GOOS and GOARCH. Fortunately, the
- // ProcessEnv.buildContext does not adjust these from build.DefaultContext
- // (even though it should). So for now, this is OK to share, but we need to
- // add logic for handling GOOS/GOARCH.
- r.moduleCacheCache = moduleCacheCache
- r.roots = append(r.roots, gopathwalk.Root{Path: r.moduleCacheDir, Type: gopathwalk.RootModuleCache})
- }
-
- r.scannedRoots = map[gopathwalk.Root]bool{}
- if r.moduleCacheCache == nil {
- r.moduleCacheCache = NewDirInfoCache()
- }
- r.otherCache = NewDirInfoCache()
- return r, nil
-}
-
-// gomodcacheForEnv returns the GOMODCACHE value to use based on the given env
-// map, which must have GOMODCACHE and GOPATH populated.
-//
-// TODO(rfindley): this is defensive refactoring.
-//  1. Is this even relevant anymore? Can't we just read GOMODCACHE?
-// 2. Use this to separate module cache scanning from other scanning.
-func gomodcacheForEnv(goenv map[string]string) string {
- if gmc := goenv["GOMODCACHE"]; gmc != "" {
- // golang/go#67156: ensure that the module cache is clean, since it is
- // assumed as a prefix to directories scanned by gopathwalk, which are
- // themselves clean.
- return filepath.Clean(gmc)
- }
- gopaths := filepath.SplitList(goenv["GOPATH"])
- if len(gopaths) == 0 {
- return ""
- }
- return filepath.Join(gopaths[0], "/pkg/mod")
-}
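For illustration (hypothetical values; the joined result is shown for a Unix-style GOPATH list separator):

	func exampleGomodcache() {
		_ = gomodcacheForEnv(map[string]string{
			"GOMODCACHE": "/custom/cache", // explicit value wins (after filepath.Clean)
			"GOPATH":     "/home/user/go",
		}) // "/custom/cache"
		_ = gomodcacheForEnv(map[string]string{
			"GOMODCACHE": "",
			"GOPATH":     "/home/user/go:/opt/go",
		}) // "/home/user/go/pkg/mod": first GOPATH entry plus pkg/mod
	}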
-
-func (r *ModuleResolver) initAllMods() error {
- stdout, err := r.env.invokeGo(context.TODO(), "list", "-m", "-e", "-json", "...")
- if err != nil {
- return err
- }
- for dec := json.NewDecoder(stdout); dec.More(); {
- mod := &gocommand.ModuleJSON{}
- if err := dec.Decode(mod); err != nil {
- return err
- }
- if mod.Dir == "" {
- r.env.logf("module %v has not been downloaded and will be ignored", mod.Path)
- // Can't do anything with a module that's not downloaded.
- continue
- }
- // golang/go#36193: the go command doesn't always clean paths.
- mod.Dir = filepath.Clean(mod.Dir)
- r.modsByModPath = append(r.modsByModPath, mod)
- r.modsByDir = append(r.modsByDir, mod)
- if mod.Main {
- r.mains = append(r.mains, mod)
- }
- }
- return nil
-}
-
-// ClearForNewScan invalidates the last scan.
-//
-// It preserves the set of roots, but forgets about the set of directories.
-// Though it forgets the set of module cache directories, it remembers their
-// contents, since they are assumed to be immutable.
-func (r *ModuleResolver) ClearForNewScan() Resolver {
- <-r.scanSema // acquire r, to guard scannedRoots
- r2 := &ModuleResolver{
- env: r.env,
- dummyVendorMod: r.dummyVendorMod,
- moduleCacheDir: r.moduleCacheDir,
- roots: r.roots,
- mains: r.mains,
- mainByDir: r.mainByDir,
- modsByModPath: r.modsByModPath,
-
- scanSema: make(chan struct{}, 1),
- scannedRoots: make(map[gopathwalk.Root]bool),
- otherCache: NewDirInfoCache(),
- moduleCacheCache: r.moduleCacheCache,
- }
- r2.scanSema <- struct{}{} // r2 must start released
- // Invalidate root scans. We don't need to invalidate module cache roots,
- // because they are immutable.
- // (We don't support a use case where GOMODCACHE is cleaned in the middle of
- // e.g. a gopls session: the user must restart gopls to get accurate
- // imports.)
- //
- // Scanning for new directories in GOMODCACHE should be handled elsewhere,
- // via a call to ScanModuleCache.
- for _, root := range r.roots {
- if root.Type == gopathwalk.RootModuleCache && r.scannedRoots[root] {
- r2.scannedRoots[root] = true
- }
- }
- r.scanSema <- struct{}{} // release r
- return r2
-}
-
-// ClearModuleInfo invalidates resolver state that depends on go.mod file
-// contents (essentially, the output of go list -m -json ...).
-//
-// Notably, it does not forget directory contents, which are reset
-// asynchronously via ClearForNewScan.
-//
-// If the ProcessEnv is a GOPATH environment, ClearModuleInfo is a no op.
-//
-// TODO(rfindley): move this to a new env.go, consolidating ProcessEnv methods.
-func (e *ProcessEnv) ClearModuleInfo() {
- if r, ok := e.resolver.(*ModuleResolver); ok {
- resolver, err := newModuleResolver(e, e.ModCache)
- if err != nil {
- e.resolver = nil
- e.resolverErr = err
- return
- }
-
- <-r.scanSema // acquire (guards caches)
- resolver.moduleCacheCache = r.moduleCacheCache
- resolver.otherCache = r.otherCache
- r.scanSema <- struct{}{} // release
-
- e.UpdateResolver(resolver)
- }
-}
-
-// UpdateResolver sets the resolver for the ProcessEnv to use in imports
-// operations. Only for use with the result of [Resolver.ClearForNewScan].
-//
-// TODO(rfindley): this awkward API is a result of the (arguably) inverted
-// relationship between configuration and state described in the doc comment
-// for [ProcessEnv].
-func (e *ProcessEnv) UpdateResolver(r Resolver) {
- e.resolver = r
- e.resolverErr = nil
-}
-
-// findPackage returns the module and directory from within the main modules
-// and their dependencies that contains the package at the given import path,
-// or returns nil, "" if no module is in scope.
-func (r *ModuleResolver) findPackage(importPath string) (*gocommand.ModuleJSON, string) {
- // This can't find packages in the stdlib, but that's harmless for all
- // the existing code paths.
- for _, m := range r.modsByModPath {
- if !strings.HasPrefix(importPath, m.Path) {
- continue
- }
- pathInModule := importPath[len(m.Path):]
- pkgDir := filepath.Join(m.Dir, pathInModule)
- if r.dirIsNestedModule(pkgDir, m) {
- continue
- }
-
- if info, ok := r.cacheLoad(pkgDir); ok {
- if loaded, err := info.reachedStatus(nameLoaded); loaded {
- if err != nil {
- continue // No package in this dir.
- }
- return m, pkgDir
- }
- if scanned, err := info.reachedStatus(directoryScanned); scanned && err != nil {
- continue // Dir is unreadable, etc.
- }
- // This is slightly wrong: a directory doesn't have to have an
- // importable package to count as a package for package-to-module
- // resolution. package main or _test files should count but
- // don't.
- // TODO(heschi): fix this.
- if _, err := r.cachePackageName(info); err == nil {
- return m, pkgDir
- }
- }
-
- // Not cached. Read the filesystem.
- pkgFiles, err := os.ReadDir(pkgDir)
- if err != nil {
- continue
- }
- // A module only contains a package if it has buildable go
- // files in that directory. If not, it could be provided by an
- // outer module. See #29736.
- for _, fi := range pkgFiles {
- if ok, _ := r.env.matchFile(pkgDir, fi.Name()); ok {
- return m, pkgDir
- }
- }
- }
- return nil, ""
-}
-
-func (r *ModuleResolver) cacheLoad(dir string) (directoryPackageInfo, bool) {
- if info, ok := r.moduleCacheCache.Load(dir); ok {
- return info, ok
- }
- return r.otherCache.Load(dir)
-}
-
-func (r *ModuleResolver) cacheStore(info directoryPackageInfo) {
- if info.rootType == gopathwalk.RootModuleCache {
- r.moduleCacheCache.Store(info.dir, info)
- } else {
- r.otherCache.Store(info.dir, info)
- }
-}
-
-// cachePackageName caches the package name for a dir already in the cache.
-func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
- if info.rootType == gopathwalk.RootModuleCache {
- return r.moduleCacheCache.CachePackageName(info)
- }
- return r.otherCache.CachePackageName(info)
-}
-
-func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []stdlib.Symbol, error) {
- if info.rootType == gopathwalk.RootModuleCache {
- return r.moduleCacheCache.CacheExports(ctx, env, info)
- }
- return r.otherCache.CacheExports(ctx, env, info)
-}
-
-// findModuleByDir returns the module that contains dir, or nil if no such
-// module is in scope.
-func (r *ModuleResolver) findModuleByDir(dir string) *gocommand.ModuleJSON {
- // This is quite tricky and may not be correct. dir could be:
- // - a package in the main module.
- // - a replace target underneath the main module's directory.
- // - a nested module in the above.
- // - a replace target somewhere totally random.
- // - a nested module in the above.
- // - in the mod cache.
- // - in /vendor/ in -mod=vendor mode.
- // - nested module? Dunno.
- // Rumor has it that replace targets cannot contain other replace targets.
- //
- // Note that it is critical here that modsByDir is sorted to have deeper dirs
- // first. This ensures that findModuleByDir finds the innermost module.
- // See also golang/go#56291.
- for _, m := range r.modsByDir {
- if !strings.HasPrefix(dir, m.Dir) {
- continue
- }
-
- if r.dirIsNestedModule(dir, m) {
- continue
- }
-
- return m
- }
- return nil
-}
-
-// dirIsNestedModule reports if dir is contained in a nested module underneath
-// mod, not actually in mod.
-func (r *ModuleResolver) dirIsNestedModule(dir string, mod *gocommand.ModuleJSON) bool {
- if !strings.HasPrefix(dir, mod.Dir) {
- return false
- }
- if r.dirInModuleCache(dir) {
- // Nested modules in the module cache are pruned,
- // so it cannot be a nested module.
- return false
- }
- if mod != nil && mod == r.dummyVendorMod {
- // The /vendor pseudomodule is flattened and doesn't actually count.
- return false
- }
- modDir, _ := r.modInfo(dir)
- if modDir == "" {
- return false
- }
- return modDir != mod.Dir
-}
-
-func readModName(modFile string) string {
- modBytes, err := os.ReadFile(modFile)
- if err != nil {
- return ""
- }
- return modulePath(modBytes)
-}
-
-func (r *ModuleResolver) modInfo(dir string) (modDir, modName string) {
- if r.dirInModuleCache(dir) {
- if matches := modCacheRegexp.FindStringSubmatch(dir); len(matches) == 3 {
- index := strings.Index(dir, matches[1]+"@"+matches[2])
- modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2])
- return modDir, readModName(filepath.Join(modDir, "go.mod"))
- }
- }
- for {
- if info, ok := r.cacheLoad(dir); ok {
- return info.moduleDir, info.moduleName
- }
- f := filepath.Join(dir, "go.mod")
- info, err := os.Stat(f)
- if err == nil && !info.IsDir() {
- return dir, readModName(f)
- }
-
- d := filepath.Dir(dir)
- if len(d) >= len(dir) {
- return "", "" // reached top of file system, no go.mod
- }
- dir = d
- }
-}
-
-func (r *ModuleResolver) dirInModuleCache(dir string) bool {
- if r.moduleCacheDir == "" {
- return false
- }
- return strings.HasPrefix(dir, r.moduleCacheDir)
-}
-
-func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
- names := map[string]string{}
- for _, path := range importPaths {
- // TODO(rfindley): shouldn't this use the dirInfoCache?
- _, packageDir := r.findPackage(path)
- if packageDir == "" {
- continue
- }
- name, err := packageDirToName(packageDir)
- if err != nil {
- continue
- }
- names[path] = name
- }
- return names, nil
-}
-
-func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
- ctx, done := event.Start(ctx, "imports.ModuleResolver.scan")
- defer done()
-
- processDir := func(info directoryPackageInfo) {
- // Skip this directory if we were not able to get the package information successfully.
- if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- return
- }
- pkg, err := r.canonicalize(info)
- if err != nil {
- return
- }
- if !callback.dirFound(pkg) {
- return
- }
-
- pkg.packageName, err = r.cachePackageName(info)
- if err != nil {
- return
- }
- if !callback.packageNameLoaded(pkg) {
- return
- }
-
- _, exports, err := r.loadExports(ctx, pkg, false)
- if err != nil {
- return
- }
- callback.exportsLoaded(pkg, exports)
- }
-
- // Start processing everything in the cache, and listen for the new stuff
- // we discover in the walk below.
- stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
- defer stop1()
- stop2 := r.otherCache.ScanAndListen(ctx, processDir)
- defer stop2()
-
- // We assume cached directories are fully cached, including all their
- // children, and have not changed. We can skip them.
- skip := func(root gopathwalk.Root, dir string) bool {
- if r.env.SkipPathInScan != nil && root.Type == gopathwalk.RootCurrentModule {
- if root.Path == dir {
- return false
- }
-
- if r.env.SkipPathInScan(filepath.Clean(dir)) {
- return true
- }
- }
-
- info, ok := r.cacheLoad(dir)
- if !ok {
- return false
- }
- // This directory can be skipped as long as we have already scanned it.
- // Packages with errors will continue to have errors, so there is no need
- // to rescan them.
- packageScanned, _ := info.reachedStatus(directoryScanned)
- return packageScanned
- }
-
- add := func(root gopathwalk.Root, dir string) {
- r.cacheStore(r.scanDirForPackage(root, dir))
- }
-
- // r.roots and the callback are not necessarily safe to use in the
- // goroutine below. Process them eagerly.
- roots := filterRoots(r.roots, callback.rootFound)
- // We can't cancel walks, because we need them to finish to have a usable
- // cache. Instead, run them in a separate goroutine and detach.
- scanDone := make(chan struct{})
- go func() {
- select {
- case <-ctx.Done():
- return
- case <-r.scanSema: // acquire
- }
- defer func() { r.scanSema <- struct{}{} }() // release
- // We have the lock on r.scannedRoots, and no other scans can run.
- for _, root := range roots {
- if ctx.Err() != nil {
- return
- }
-
- if r.scannedRoots[root] {
- continue
- }
- gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: true})
- r.scannedRoots[root] = true
- }
- close(scanDone)
- }()
- select {
- case <-ctx.Done():
- case <-scanDone:
- }
- return nil
-}
-
-func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 {
- if stdlib.HasPackage(path) {
- return MaxRelevance
- }
- mod, _ := r.findPackage(path)
- return modRelevance(mod)
-}
-
-func modRelevance(mod *gocommand.ModuleJSON) float64 {
- var relevance float64
- switch {
- case mod == nil: // out of scope
- return MaxRelevance - 4
- case mod.Indirect:
- relevance = MaxRelevance - 3
- case !mod.Main:
- relevance = MaxRelevance - 2
- default:
- relevance = MaxRelevance - 1 // main module ties with stdlib
- }
-
- _, versionString, ok := module.SplitPathVersion(mod.Path)
- if ok {
- index := strings.Index(versionString, "v")
- if index == -1 {
- return relevance
- }
- if versionNumber, err := strconv.ParseFloat(versionString[index+1:], 64); err == nil {
- relevance += versionNumber / 1000
- }
- }
-
- return relevance
-}
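A worked example of the scoring (illustrative only; exampleModRelevance and the module path are made up, and MaxRelevance is defined elsewhere in this package):

	// Base score by module kind, with a small bump for a major-version suffix:
	//   out of scope:  MaxRelevance - 4
	//   indirect dep:  MaxRelevance - 3
	//   direct dep:    MaxRelevance - 2
	//   main module:   MaxRelevance - 1
	// An indirect dependency at .../v3 scores MaxRelevance - 3 + 0.003, so newer
	// major versions win ties between otherwise equal candidates.
	func exampleModRelevance() float64 {
		return modRelevance(&gocommand.ModuleJSON{Path: "example.com/dep/v3", Indirect: true})
	}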
-
-// canonicalize returns the canonical package for the given directory info, using
-// the module information gathered when the resolver was initialized from 'go list -m'.
-func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
- // Packages in GOROOT are already canonical, regardless of the std/cmd modules.
- if info.rootType == gopathwalk.RootGOROOT {
- return &pkg{
- importPathShort: info.nonCanonicalImportPath,
- dir: info.dir,
- packageName: path.Base(info.nonCanonicalImportPath),
- relevance: MaxRelevance,
- }, nil
- }
-
- importPath := info.nonCanonicalImportPath
- mod := r.findModuleByDir(info.dir)
- // Check if the directory is underneath a module that's in scope.
- if mod != nil {
- // It is. If dir is the target of a replace directive,
- // our guessed import path is wrong. Use the real one.
- if mod.Dir == info.dir {
- importPath = mod.Path
- } else {
- dirInMod := info.dir[len(mod.Dir)+len("/"):]
- importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
- }
- } else if !strings.HasPrefix(importPath, info.moduleName) {
- // The module's name doesn't match the package's import path. It
- // probably needs a replace directive we don't have.
- return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
- }
-
- res := &pkg{
- importPathShort: importPath,
- dir: info.dir,
- relevance: modRelevance(mod),
- }
- // We may have discovered a package that has a different version
- // in scope already. Canonicalize to that one if possible.
- if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
- res.dir = canonicalDir
- }
- return res, nil
-}
-
-func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) {
- if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
- return r.cacheExports(ctx, r.env, info)
- }
- return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
-}
-
-func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
- subdir := ""
- if prefix := root.Path + string(filepath.Separator); strings.HasPrefix(dir, prefix) {
- subdir = dir[len(prefix):]
- }
- importPath := filepath.ToSlash(subdir)
- if strings.HasPrefix(importPath, "vendor/") {
- // Only enter vendor directories if they're explicitly requested as a root.
- return directoryPackageInfo{
- status: directoryScanned,
- err: fmt.Errorf("unwanted vendor directory"),
- }
- }
- switch root.Type {
- case gopathwalk.RootCurrentModule:
- importPath = path.Join(r.mainByDir[root.Path].Path, filepath.ToSlash(subdir))
- case gopathwalk.RootModuleCache:
- matches := modCacheRegexp.FindStringSubmatch(subdir)
- if len(matches) == 0 {
- return directoryPackageInfo{
- status: directoryScanned,
- err: fmt.Errorf("invalid module cache path: %v", subdir),
- }
- }
- modPath, err := module.UnescapePath(filepath.ToSlash(matches[1]))
- if err != nil {
- r.env.logf("decoding module cache path %q: %v", subdir, err)
- return directoryPackageInfo{
- status: directoryScanned,
- err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
- }
- }
- importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
- }
-
- modDir, modName := r.modInfo(dir)
- result := directoryPackageInfo{
- status: directoryScanned,
- dir: dir,
- rootType: root.Type,
- nonCanonicalImportPath: importPath,
- moduleDir: modDir,
- moduleName: modName,
- }
- if root.Type == gopathwalk.RootGOROOT {
- // stdlib packages are always in scope, despite the confusing go.mod
- return result
- }
- return result
-}
-
-// modCacheRegexp splits a path in a module cache into module, module version, and package.
-var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
-
-var (
- slashSlash = []byte("//")
- moduleStr = []byte("module")
-)
-
-// modulePath returns the module path from the go.mod file text.
-// If it cannot find a module path, it returns an empty string.
-// It is tolerant of unrelated problems in the go.mod file.
-//
-// Copied from cmd/go/internal/modfile.
-func modulePath(mod []byte) string {
- for len(mod) > 0 {
- line := mod
- mod = nil
- if i := bytes.IndexByte(line, '\n'); i >= 0 {
- line, mod = line[:i], line[i+1:]
- }
- if i := bytes.Index(line, slashSlash); i >= 0 {
- line = line[:i]
- }
- line = bytes.TrimSpace(line)
- if !bytes.HasPrefix(line, moduleStr) {
- continue
- }
- line = line[len(moduleStr):]
- n := len(line)
- line = bytes.TrimSpace(line)
- if len(line) == n || len(line) == 0 {
- continue
- }
-
- if line[0] == '"' || line[0] == '`' {
- p, err := strconv.Unquote(string(line))
- if err != nil {
- return "" // malformed quoted string or multiline module path
- }
- return p
- }
-
- return string(line)
- }
- return "" // missing module path
-}
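For example (a within-package sketch; modulePath is tolerant of comments and unrelated directives):

	func exampleModulePath() {
		_ = modulePath([]byte("// Deps live in go.sum.\nmodule example.com/m // main module\n\ngo 1.21\n")) // "example.com/m"
		_ = modulePath([]byte("go 1.21\n"))                                                                 // "": no module directive
	}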
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
deleted file mode 100644
index b96c9d4bf..000000000
--- a/vendor/golang.org/x/tools/internal/imports/mod_cache.go
+++ /dev/null
@@ -1,331 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package imports
-
-import (
- "context"
- "fmt"
- "path"
- "path/filepath"
- "strings"
- "sync"
-
- "golang.org/x/mod/module"
- "golang.org/x/tools/internal/gopathwalk"
- "golang.org/x/tools/internal/stdlib"
-)
-
-// To find packages to import, the resolver needs to know about all of
-// the packages that could be imported. This includes packages in (1) the
-// current module, (2) replace targets, and (3) the module cache. Packages in
-// (1) and (2) may change over time, as the client may edit the current module
-// and locally replaced modules.
-// The module cache (which includes all of the packages in (3)) can only
-// ever be added to.
-//
-// The resolver can thus save state about packages in the module cache
-// and guarantee that this will not change over time. To obtain information
-// about new modules added to the module cache, the module cache should be
-// rescanned.
-//
-// It is OK to serve information about modules that have been deleted,
-// as they do still exist.
-// TODO(suzmue): can we share information with the caller about
-// what module needs to be downloaded to import this package?
-
-type directoryPackageStatus int
-
-const (
- _ directoryPackageStatus = iota
- directoryScanned
- nameLoaded
- exportsLoaded
-)
-
-// directoryPackageInfo holds (possibly incomplete) information about packages
-// contained in a given directory.
-type directoryPackageInfo struct {
- // status indicates the extent to which this struct has been filled in.
- status directoryPackageStatus
- // err is non-nil when there was an error trying to reach status.
- err error
-
- // Set when status >= directoryScanned.
-
- // dir is the absolute directory of this package.
- dir string
- rootType gopathwalk.RootType
- // nonCanonicalImportPath is the package's expected import path. It may
- // not actually be importable at that path.
- nonCanonicalImportPath string
-
- // Module-related information.
- moduleDir string // The directory that is the module root of this dir.
- moduleName string // The module name that contains this dir.
-
- // Set when status >= nameLoaded.
-
- packageName string // the package name, as declared in the source.
-
- // Set when status >= exportsLoaded.
- // TODO(rfindley): it's hard to see this, but exports depend implicitly on
- // the default build context GOOS and GOARCH.
- //
- // We can make this explicit, and key exports by GOOS, GOARCH.
- exports []stdlib.Symbol
-}
-
-// reachedStatus reports whether info has reached at least the target status, along
-// with any error associated with an attempt to reach that status.
-func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (bool, error) {
- if info.err == nil {
- return info.status >= target, nil
- }
- if info.status == target {
- return true, info.err
- }
- return true, nil
-}
-
-// DirInfoCache is a concurrency-safe map for storing information about
-// directories that may contain packages.
-//
-// The information in this cache is built incrementally. Entries are initialized in scan.
-// No new keys should be added in any other functions, as all directories containing
-// packages are identified in scan.
-//
-// Other functions, including loadExports and findPackage, may update entries in this cache
-// as they discover new things about the directory.
-//
-// The information in the cache is not expected to change for the cache's
-// lifetime, so there is no protection against competing writes. Users should
-// take care not to hold the cache across changes to the underlying files.
-type DirInfoCache struct {
- mu sync.Mutex
- // dirs stores information about packages in directories, keyed by absolute path.
- dirs map[string]*directoryPackageInfo
- listeners map[*int]cacheListener
-}
-
-func NewDirInfoCache() *DirInfoCache {
- return &DirInfoCache{
- dirs: make(map[string]*directoryPackageInfo),
- listeners: make(map[*int]cacheListener),
- }
-}
-
-type cacheListener func(directoryPackageInfo)
-
-// ScanAndListen calls listener on all the items in the cache, and on anything
-// newly added. The returned stop function waits for all in-flight callbacks to
-// finish and blocks new ones.
-func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
- ctx, cancel := context.WithCancel(ctx)
-
- // Flushing out all the callbacks is tricky without knowing how many there
- // are going to be. Setting an arbitrary limit makes it much easier.
- const maxInFlight = 10
- sema := make(chan struct{}, maxInFlight)
- for range maxInFlight {
- sema <- struct{}{}
- }
-
- cookie := new(int) // A unique ID we can use for the listener.
-
- // We can't hold mu while calling the listener.
- d.mu.Lock()
- var keys []string
- for key := range d.dirs {
- keys = append(keys, key)
- }
- d.listeners[cookie] = func(info directoryPackageInfo) {
- select {
- case <-ctx.Done():
- return
- case <-sema:
- }
- listener(info)
- sema <- struct{}{}
- }
- d.mu.Unlock()
-
- stop := func() {
- cancel()
- d.mu.Lock()
- delete(d.listeners, cookie)
- d.mu.Unlock()
- for range maxInFlight {
- <-sema
- }
- }
-
- // Process the pre-existing keys.
- for _, k := range keys {
- select {
- case <-ctx.Done():
- return stop
- default:
- }
- if v, ok := d.Load(k); ok {
- listener(v)
- }
- }
-
- return stop
-}
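A small sketch of the listener pattern (hypothetical directory and function name; within this internal package):

	func exampleScanAndListen() {
		cache := NewDirInfoCache()
		stop := cache.ScanAndListen(context.Background(), func(info directoryPackageInfo) {
			_ = info.dir // invoked for existing entries and for entries stored later
		})
		// Storing a new entry notifies the listener.
		cache.Store("/tmp/pkg", directoryPackageInfo{status: directoryScanned, dir: "/tmp/pkg"})
		stop() // waits for in-flight callbacks, then detaches the listener
	}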
-
-// Store stores the package info for dir.
-func (d *DirInfoCache) Store(dir string, info directoryPackageInfo) {
- d.mu.Lock()
- // TODO(rfindley, golang/go#59216): should we overwrite an existing entry?
- // That seems incorrect as the cache should be idempotent.
- _, old := d.dirs[dir]
- d.dirs[dir] = &info
- var listeners []cacheListener
- for _, l := range d.listeners {
- listeners = append(listeners, l)
- }
- d.mu.Unlock()
-
- if !old {
- for _, l := range listeners {
- l(info)
- }
- }
-}
-
-// Load returns a copy of the directoryPackageInfo for absolute directory dir.
-func (d *DirInfoCache) Load(dir string) (directoryPackageInfo, bool) {
- d.mu.Lock()
- defer d.mu.Unlock()
- info, ok := d.dirs[dir]
- if !ok {
- return directoryPackageInfo{}, false
- }
- return *info, true
-}
-
-// Keys returns the keys currently present in d.
-func (d *DirInfoCache) Keys() (keys []string) {
- d.mu.Lock()
- defer d.mu.Unlock()
- for key := range d.dirs {
- keys = append(keys, key)
- }
- return keys
-}
-
-func (d *DirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
- if loaded, err := info.reachedStatus(nameLoaded); loaded {
- return info.packageName, err
- }
- if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- return "", fmt.Errorf("cannot read package name, scan error: %v", err)
- }
- info.packageName, info.err = packageDirToName(info.dir)
- info.status = nameLoaded
- d.Store(info.dir, info)
- return info.packageName, info.err
-}
-
-func (d *DirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []stdlib.Symbol, error) {
- if reached, _ := info.reachedStatus(exportsLoaded); reached {
- return info.packageName, info.exports, info.err
- }
- if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
- return "", nil, err
- }
- info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
- if info.err == context.Canceled || info.err == context.DeadlineExceeded {
- return info.packageName, info.exports, info.err
- }
- // The cache structure wants things to proceed linearly. We can skip a
- // step here, but only if we succeed.
- if info.status == nameLoaded || info.err == nil {
- info.status = exportsLoaded
- } else {
- info.status = nameLoaded
- }
- d.Store(info.dir, info)
- return info.packageName, info.exports, info.err
-}
-
-// ScanModuleCache walks the given directory, which must be a GOMODCACHE value,
-// for directory package information, storing the results in cache.
-func ScanModuleCache(dir string, cache *DirInfoCache, logf func(string, ...any)) {
- // Note(rfindley): it's hard to see, but this function attempts to implement
- // just the side effects on cache of calling PrimeCache with a ProcessEnv
- // that has the given dir as its GOMODCACHE.
- //
- // Teasing out the control flow, we see that we can avoid any handling of
- // vendor/ and can infer module info entirely from the path, simplifying the
- // logic here.
-
- root := gopathwalk.Root{
- Path: filepath.Clean(dir),
- Type: gopathwalk.RootModuleCache,
- }
-
- directoryInfo := func(root gopathwalk.Root, dir string) directoryPackageInfo {
- // This is a copy of ModuleResolver.scanDirForPackage, trimmed down to
- // logic that applies to a module cache directory.
-
- subdir := ""
- if dir != root.Path {
- subdir = dir[len(root.Path)+len("/"):]
- }
-
- matches := modCacheRegexp.FindStringSubmatch(subdir)
- if len(matches) == 0 {
- return directoryPackageInfo{
- status: directoryScanned,
- err: fmt.Errorf("invalid module cache path: %v", subdir),
- }
- }
- modPath, err := module.UnescapePath(filepath.ToSlash(matches[1]))
- if err != nil {
- if logf != nil {
- logf("decoding module cache path %q: %v", subdir, err)
- }
- return directoryPackageInfo{
- status: directoryScanned,
- err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
- }
- }
- importPath := path.Join(modPath, filepath.ToSlash(matches[3]))
- index := strings.Index(dir, matches[1]+"@"+matches[2])
- modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2])
- modName := readModName(filepath.Join(modDir, "go.mod"))
- return directoryPackageInfo{
- status: directoryScanned,
- dir: dir,
- rootType: root.Type,
- nonCanonicalImportPath: importPath,
- moduleDir: modDir,
- moduleName: modName,
- }
- }
-
- add := func(root gopathwalk.Root, dir string) {
- info := directoryInfo(root, dir)
- cache.Store(info.dir, info)
- }
-
- skip := func(_ gopathwalk.Root, dir string) bool {
- // Skip directories that have already been scanned.
- //
- // Note that gopathwalk only adds "package" directories, which must contain
- // a .go file, and all such package directories in the module cache are
- // immutable. So if we can load a dir, it can be skipped.
- info, ok := cache.Load(dir)
- if !ok {
- return false
- }
- packageScanned, _ := info.reachedStatus(directoryScanned)
- return packageScanned
- }
-
- gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: logf, ModulesEnabled: true})
-}
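Typical use is to prime a cache once and hand it to a resolver later; a minimal sketch (hypothetical GOMODCACHE path and function name):

	func exampleScanModuleCache() {
		logf := func(format string, args ...any) {} // no-op logger; use log.Printf when debugging
		cache := NewDirInfoCache()
		ScanModuleCache("/home/user/go/pkg/mod", cache, logf)
		for _, dir := range cache.Keys() {
			if info, ok := cache.Load(dir); ok {
				_ = info.nonCanonicalImportPath // one entry per package directory found
			}
		}
	}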
diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go
deleted file mode 100644
index 67c17bc43..000000000
--- a/vendor/golang.org/x/tools/internal/imports/sortimports.go
+++ /dev/null
@@ -1,298 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Hacked up copy of go/ast/import.go
-// Modified to use a single token.File in preference to a FileSet.
-
-package imports
-
-import (
- "go/ast"
- "go/token"
- "log"
- "slices"
- "sort"
- "strconv"
-)
-
-// sortImports sorts runs of consecutive import lines in import blocks in f.
-// It also removes duplicate imports when it is possible to do so without data loss.
-//
-// It may mutate the token.File and the ast.File.
-func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
- for i, d := range f.Decls {
- d, ok := d.(*ast.GenDecl)
- if !ok || d.Tok != token.IMPORT {
- // Not an import declaration, so we're done.
- // Imports are always first.
- break
- }
-
- if len(d.Specs) == 0 {
- // Empty import block, remove it.
- f.Decls = slices.Delete(f.Decls, i, i+1)
- }
-
- if !d.Lparen.IsValid() {
- // Not a block: sorted by default.
- continue
- }
-
- // Identify and sort runs of specs on successive lines.
- i := 0
- specs := d.Specs[:0]
- for j, s := range d.Specs {
- if j > i && tokFile.Line(s.Pos()) > 1+tokFile.Line(d.Specs[j-1].End()) {
- // j begins a new run. End this one.
- specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:j])...)
- i = j
- }
- }
- specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:])...)
- d.Specs = specs
-
- // Deduping can leave a blank line before the rparen; clean that up.
- // Ignore line directives.
- if len(d.Specs) > 0 {
- lastSpec := d.Specs[len(d.Specs)-1]
- lastLine := tokFile.PositionFor(lastSpec.Pos(), false).Line
- if rParenLine := tokFile.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
- tokFile.MergeLine(rParenLine - 1) // has side effects!
- }
- }
- }
-}
-
-// mergeImports merges all the import declarations into the first one.
-// Taken from golang.org/x/tools/ast/astutil.
-// This does not adjust line numbers properly.
-func mergeImports(f *ast.File) {
- if len(f.Decls) <= 1 {
- return
- }
-
- // Merge all the import declarations into the first one.
- var first *ast.GenDecl
- for i := 0; i < len(f.Decls); i++ {
- decl := f.Decls[i]
- gen, ok := decl.(*ast.GenDecl)
- if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
- continue
- }
- if first == nil {
- first = gen
- continue // Don't touch the first one.
- }
- // We now know there is more than one package in this import
- // declaration. Ensure that it ends up parenthesized.
- first.Lparen = first.Pos()
- // Move the imports of the other import declaration to the first one.
- for _, spec := range gen.Specs {
- spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
- first.Specs = append(first.Specs, spec)
- }
- f.Decls = slices.Delete(f.Decls, i, i+1)
- i--
- }
-}
-
-// declImports reports whether gen contains an import of path.
-// Taken from golang.org/x/tools/ast/astutil.
-func declImports(gen *ast.GenDecl, path string) bool {
- if gen.Tok != token.IMPORT {
- return false
- }
- for _, spec := range gen.Specs {
- impspec := spec.(*ast.ImportSpec)
- if importPath(impspec) == path {
- return true
- }
- }
- return false
-}
-
-func importPath(s ast.Spec) string {
- t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
- if err == nil {
- return t
- }
- return ""
-}
-
-func importName(s ast.Spec) string {
- n := s.(*ast.ImportSpec).Name
- if n == nil {
- return ""
- }
- return n.Name
-}
-
-func importComment(s ast.Spec) string {
- c := s.(*ast.ImportSpec).Comment
- if c == nil {
- return ""
- }
- return c.Text()
-}
-
-// collapse indicates whether prev may be removed, leaving only next.
-func collapse(prev, next ast.Spec) bool {
- if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
- return false
- }
- return prev.(*ast.ImportSpec).Comment == nil
-}
-
-type posSpan struct {
- Start token.Pos
- End token.Pos
-}
-
-// sortSpecs sorts the import specs within each import decl.
-// It may mutate the token.File.
-func sortSpecs(localPrefix string, tokFile *token.File, f *ast.File, specs []ast.Spec) []ast.Spec {
- // Can't short-circuit here even if specs are already sorted,
- // since they might yet need deduplication.
- // A lone import, however, may be safely ignored.
- if len(specs) <= 1 {
- return specs
- }
-
- // Record positions for specs.
- pos := make([]posSpan, len(specs))
- for i, s := range specs {
- pos[i] = posSpan{s.Pos(), s.End()}
- }
-
- // Identify comments in this range.
- // Any comment from pos[0].Start to the final line counts.
- lastLine := tokFile.Line(pos[len(pos)-1].End)
- cstart := len(f.Comments)
- cend := len(f.Comments)
- for i, g := range f.Comments {
- if g.Pos() < pos[0].Start {
- continue
- }
- if i < cstart {
- cstart = i
- }
- if tokFile.Line(g.End()) > lastLine {
- cend = i
- break
- }
- }
- comments := f.Comments[cstart:cend]
-
- // Assign each comment to the import spec preceding it.
- importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
- specIndex := 0
- for _, g := range comments {
- for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
- specIndex++
- }
- s := specs[specIndex].(*ast.ImportSpec)
- importComment[s] = append(importComment[s], g)
- }
-
- // Sort the import specs by import path.
- // Remove duplicates, when possible without data loss.
- // Reassign the import paths to have the same position sequence.
- // Reassign each comment to abut the end of its spec.
- // Sort the comments by new position.
- sort.Sort(byImportSpec{localPrefix, specs})
-
- // Dedup. Thanks to our sorting, we can just consider
- // adjacent pairs of imports.
- deduped := specs[:0]
- for i, s := range specs {
- if i == len(specs)-1 || !collapse(s, specs[i+1]) {
- deduped = append(deduped, s)
- } else {
- p := s.Pos()
- tokFile.MergeLine(tokFile.Line(p)) // has side effects!
- }
- }
- specs = deduped
-
- // Fix up comment positions
- for i, s := range specs {
- s := s.(*ast.ImportSpec)
- if s.Name != nil {
- s.Name.NamePos = pos[i].Start
- }
- s.Path.ValuePos = pos[i].Start
- s.EndPos = pos[i].End
- nextSpecPos := pos[i].End
-
- for _, g := range importComment[s] {
- for _, c := range g.List {
- c.Slash = pos[i].End
- nextSpecPos = c.End()
- }
- }
- if i < len(specs)-1 {
- pos[i+1].Start = nextSpecPos
- pos[i+1].End = nextSpecPos
- }
- }
-
- sort.Sort(byCommentPos(comments))
-
- // The comment fixup above can leave blank lines, because the import specs remain on different lines.
- // Remove those blank lines here by merging each spec's line into the first import spec's line.
- firstSpecLine := tokFile.Line(specs[0].Pos())
- for _, s := range specs[1:] {
- p := s.Pos()
- line := tokFile.Line(p)
- for previousLine := line - 1; previousLine >= firstSpecLine; {
- // MergeLine can panic; avoid the panic at the cost of not removing the
- // blank line (golang/go#50329).
- if previousLine > 0 && previousLine < tokFile.LineCount() {
- tokFile.MergeLine(previousLine) // has side effects!
- previousLine--
- } else {
- // try to gather some data to diagnose how this could happen
- req := "Please report what the imports section of your go file looked like."
- log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
- firstSpecLine, line, previousLine, tokFile.LineCount(), req)
- }
- }
- }
- return specs
-}
-
-type byImportSpec struct {
- localPrefix string
- specs []ast.Spec // slice of *ast.ImportSpec
-}
-
-func (x byImportSpec) Len() int { return len(x.specs) }
-func (x byImportSpec) Swap(i, j int) { x.specs[i], x.specs[j] = x.specs[j], x.specs[i] }
-func (x byImportSpec) Less(i, j int) bool {
- ipath := importPath(x.specs[i])
- jpath := importPath(x.specs[j])
-
- igroup := importGroup(x.localPrefix, ipath)
- jgroup := importGroup(x.localPrefix, jpath)
- if igroup != jgroup {
- return igroup < jgroup
- }
-
- if ipath != jpath {
- return ipath < jpath
- }
- iname := importName(x.specs[i])
- jname := importName(x.specs[j])
-
- if iname != jname {
- return iname < jname
- }
- return importComment(x.specs[i]) < importComment(x.specs[j])
-}
-
-type byCommentPos []*ast.CommentGroup
-
-func (x byCommentPos) Len() int { return len(x) }
-func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
-func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
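As a side note on the ordering above: byImportSpec.Less orders specs by group number, then import path, then import name, then comment text. The following standalone sketch reproduces that comparison over simplified records; the package name, the group values, and the sample import paths are assumptions made only for illustration.

package main

import (
	"fmt"
	"sort"
)

// rec is a simplified stand-in for an import spec: a precomputed group
// number, the import path, and an optional explicit name.
type rec struct {
	group int
	path  string
	name  string
}

func main() {
	recs := []rec{
		{group: 1, path: "github.com/example/lib"},
		{group: 0, path: "fmt"},
		{group: 3, path: "example.com/mod/internal/util", name: "util"},
		{group: 0, path: "bytes"},
	}
	// Same comparison shape as byImportSpec.Less: group, then path, then name.
	sort.Slice(recs, func(i, j int) bool {
		a, b := recs[i], recs[j]
		if a.group != b.group {
			return a.group < b.group
		}
		if a.path != b.path {
			return a.path < b.path
		}
		return a.name < b.name
	})
	for _, r := range recs {
		fmt.Printf("group %d: %s %s\n", r.group, r.path, r.name)
	}
}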
diff --git a/vendor/golang.org/x/tools/internal/imports/source.go b/vendor/golang.org/x/tools/internal/imports/source.go
deleted file mode 100644
index cbe4f3c5b..000000000
--- a/vendor/golang.org/x/tools/internal/imports/source.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2024 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package imports
-
-import "context"
-
-// These types document the APIs below.
-//
-// TODO(rfindley): consider making these defined types rather than aliases.
-type (
- ImportPath = string
- PackageName = string
- Symbol = string
-
- // References is a set of references found in a Go file. The first map key is
- // the left-hand side of a selector expression, the second key is the
- // right-hand side, and the value should always be true.
- References = map[PackageName]map[Symbol]bool
-)
-
-// A Result satisfies a missing import.
-//
-// The Import field describes the missing import spec, and the Package field
-// summarizes the package exports.
-type Result struct {
- Import *ImportInfo
- Package *PackageInfo
-}
-
-// An ImportInfo represents a single import statement.
-type ImportInfo struct {
- ImportPath string // import path, e.g. "crypto/rand".
- Name string // import name, e.g. "crand", or "" if none.
-}
-
-// A PackageInfo represents what's known about a package.
-type PackageInfo struct {
- Name string // package name in the package declaration, if known
- Exports map[string]bool // set of names of known package-level symbols
-}
-
-// A Source provides imports to satisfy unresolved references in the file being
-// fixed.
-type Source interface {
- // LoadPackageNames queries PackageName information for the requested import
- // paths, when operating from the provided srcDir.
- //
- // TODO(rfindley): try to refactor to remove this operation.
- LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error)
-
- // ResolveReferences asks the Source for the best package name to satisfy
- // each of the missing references, in the context of fixing the given
- // filename.
- //
- // Returns a [Result] for each missing package name that could be satisfied,
- // pairing an import with the symbols that package provides. A package name
- // may have no result if no candidates satisfy all of its missing references.
- // It is up to each data source to select the best result for each entry in
- // the missing map.
- ResolveReferences(ctx context.Context, filename string, missing References) ([]*Result, error)
-}
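To make the contract above concrete, here is a hypothetical Source backed by a fixed table. It is only a sketch of how the interface can be satisfied: the name staticSource and the table layout are assumptions, and it relies on the types declared in this file plus the "context" import already present above.

// staticSource resolves references from a fixed table of known packages.
type staticSource struct {
	// packages maps an import path to its package name and exported symbols.
	packages map[ImportPath]PackageInfo
}

var _ Source = (*staticSource)(nil) // compile-time check that the sketch satisfies Source

func (s *staticSource) LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) {
	names := make(map[ImportPath]PackageName, len(paths))
	for _, p := range paths {
		if info, ok := s.packages[p]; ok {
			names[p] = info.Name
		}
	}
	return names, nil
}

func (s *staticSource) ResolveReferences(ctx context.Context, filename string, missing References) ([]*Result, error) {
	var results []*Result
	for pkgName, syms := range missing {
		for path, info := range s.packages {
			if info.Name != pkgName {
				continue
			}
			// Accept a candidate only if it exports every missing symbol.
			ok := true
			for sym := range syms {
				if !info.Exports[sym] {
					ok = false
					break
				}
			}
			if ok {
				info := info // copy so the pointer below does not alias the loop variable
				results = append(results, &Result{
					Import:  &ImportInfo{ImportPath: path},
					Package: &info,
				})
				break
			}
		}
	}
	return results, nil
}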
diff --git a/vendor/golang.org/x/tools/internal/imports/source_env.go b/vendor/golang.org/x/tools/internal/imports/source_env.go
deleted file mode 100644
index ec996c3cc..000000000
--- a/vendor/golang.org/x/tools/internal/imports/source_env.go
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright 2024 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package imports
-
-import (
- "context"
- "path/filepath"
- "strings"
- "sync"
-
- "golang.org/x/sync/errgroup"
- "golang.org/x/tools/internal/gopathwalk"
-)
-
-// ProcessEnvSource implements the [Source] interface using the legacy
-// [ProcessEnv] abstraction.
-type ProcessEnvSource struct {
- env *ProcessEnv
- srcDir string
- filename string
- pkgName string
-}
-
-// NewProcessEnvSource returns a [ProcessEnvSource] wrapping the given
-// env, to be used for fixing imports in the file with name filename in package
-// named pkgName.
-func NewProcessEnvSource(env *ProcessEnv, filename, pkgName string) (*ProcessEnvSource, error) {
- abs, err := filepath.Abs(filename)
- if err != nil {
- return nil, err
- }
- srcDir := filepath.Dir(abs)
- return &ProcessEnvSource{
- env: env,
- srcDir: srcDir,
- filename: filename,
- pkgName: pkgName,
- }, nil
-}
-
-func (s *ProcessEnvSource) LoadPackageNames(ctx context.Context, srcDir string, unknown []string) (map[string]string, error) {
- r, err := s.env.GetResolver()
- if err != nil {
- return nil, err
- }
- return r.loadPackageNames(unknown, srcDir)
-}
-
-func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename string, refs map[string]map[string]bool) ([]*Result, error) {
- var mu sync.Mutex
- found := make(map[string][]pkgDistance)
- callback := &scanCallback{
- rootFound: func(gopathwalk.Root) bool {
- return true // We want everything.
- },
- dirFound: func(pkg *pkg) bool {
- return pkgIsCandidate(filename, refs, pkg)
- },
- packageNameLoaded: func(pkg *pkg) bool {
- if _, want := refs[pkg.packageName]; !want {
- return false
- }
- if pkg.dir == s.srcDir && s.pkgName == pkg.packageName {
- // The candidate is in the same directory and has the
- // same package name. Don't try to import ourselves.
- return false
- }
- if !CanUse(filename, pkg.dir) {
- return false
- }
- mu.Lock()
- defer mu.Unlock()
- found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(s.srcDir, pkg.dir)})
- return false // We'll do our own loading after we sort.
- },
- }
- resolver, err := s.env.GetResolver()
- if err != nil {
- return nil, err
- }
- if err := resolver.scan(ctx, callback); err != nil {
- return nil, err
- }
-
- g, ctx := errgroup.WithContext(ctx)
-
- searcher := symbolSearcher{
- logf: s.env.logf,
- srcDir: s.srcDir,
- xtest: strings.HasSuffix(s.pkgName, "_test"),
- loadExports: resolver.loadExports,
- }
-
- var resultMu sync.Mutex
- results := make(map[string]*Result, len(refs))
- for pkgName, symbols := range refs {
- g.Go(func() error {
- found, err := searcher.search(ctx, found[pkgName], pkgName, symbols)
- if err != nil {
- return err
- }
- if found == nil {
- return nil // No matching package.
- }
-
- imp := &ImportInfo{
- ImportPath: found.importPathShort,
- }
- pkg := &PackageInfo{
- Name: pkgName,
- Exports: symbols,
- }
- resultMu.Lock()
- results[pkgName] = &Result{Import: imp, Package: pkg}
- resultMu.Unlock()
- return nil
- })
- }
- if err := g.Wait(); err != nil {
- return nil, err
- }
- var ans []*Result
- for _, x := range results {
- ans = append(ans, x)
- }
- return ans, nil
-}
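The ResolveReferences implementation above fans out one goroutine per missing package name via errgroup and gathers results under a mutex. The standalone sketch below shows that pattern in isolation; the inputs are made up and the symbol search is replaced by a trivial stand-in.

package main

import (
	"context"
	"fmt"
	"sync"

	"golang.org/x/sync/errgroup"
)

func main() {
	// missing maps a package name to the symbols that need to be satisfied.
	missing := map[string][]string{
		"bytes":   {"Buffer"},
		"strings": {"Builder", "Join"},
	}

	g, ctx := errgroup.WithContext(context.Background())

	var mu sync.Mutex
	found := make(map[string]int, len(missing))

	for pkg, syms := range missing {
		pkg, syms := pkg, syms // capture per iteration (needed before Go 1.22)
		g.Go(func() error {
			if err := ctx.Err(); err != nil {
				return err // another search already failed; stop early
			}
			// A real implementation would search candidate packages here;
			// this stand-in only records how many symbols were requested.
			mu.Lock()
			found[pkg] = len(syms)
			mu.Unlock()
			return nil
		})
	}
	if err := g.Wait(); err != nil {
		fmt.Println("search failed:", err)
		return
	}
	fmt.Println(found)
}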
diff --git a/vendor/golang.org/x/tools/internal/imports/source_modindex.go b/vendor/golang.org/x/tools/internal/imports/source_modindex.go
deleted file mode 100644
index ca745d4a1..000000000
--- a/vendor/golang.org/x/tools/internal/imports/source_modindex.go
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright 2024 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package imports
-
-import (
- "context"
- "sync"
- "time"
-
- "golang.org/x/tools/internal/modindex"
-)
-
-// This code is here rather than in the modindex package
-// to avoid import loops.
-
-// TODO(adonovan): this code is only used by a test in this package.
-// Can we delete it? Or is there a plan to call NewIndexSource from
-// cmd/goimports?
-
-// IndexSource implements Source using modindex, so it only covers the module cache.
-//
-// This is perhaps over-engineered: a new Index is read at first use,
-// Update is called again once 15 minutes have elapsed, and a new Index
-// is read if the index changed. It is not clear the Mutex is needed.
-type IndexSource struct {
- modcachedir string
- mu sync.Mutex
- index *modindex.Index // (access via getIndex)
- expires time.Time
-}
-
-// NewIndexSource creates a new Source. It is called from NewView in cache/session.go.
-func NewIndexSource(cachedir string) *IndexSource {
- return &IndexSource{modcachedir: cachedir}
-}
-
-func (s *IndexSource) LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) {
- // This is used by goimports to resolve the package names of imports of the
- // current package, which is irrelevant for the module cache.
- return nil, nil
-}
-
-func (s *IndexSource) ResolveReferences(ctx context.Context, filename string, missing References) ([]*Result, error) {
- index, err := s.getIndex()
- if err != nil {
- return nil, err
- }
- var cs []modindex.Candidate
- for pkg, nms := range missing {
- for nm := range nms {
- x := index.Lookup(pkg, nm, false)
- cs = append(cs, x...)
- }
- }
- found := make(map[string]*Result)
- for _, c := range cs {
- var x *Result
- if x = found[c.ImportPath]; x == nil {
- x = &Result{
- Import: &ImportInfo{
- ImportPath: c.ImportPath,
- Name: "",
- },
- Package: &PackageInfo{
- Name: c.PkgName,
- Exports: make(map[string]bool),
- },
- }
- found[c.ImportPath] = x
- }
- x.Package.Exports[c.Name] = true
- }
- var ans []*Result
- for _, x := range found {
- ans = append(ans, x)
- }
- return ans, nil
-}
-
-func (s *IndexSource) getIndex() (*modindex.Index, error) {
- s.mu.Lock()
- defer s.mu.Unlock()
-
- // (s.index == nil implies s.expires is zero, so the first condition is
- // strictly redundant. But it makes the postcondition very clear.)
- if s.index == nil || time.Now().After(s.expires) {
- index, err := modindex.Update(s.modcachedir)
- if err != nil {
- return nil, err
- }
- s.index = index
- s.expires = index.ValidAt.Add(15 * time.Minute) // (refresh period)
- }
- // Inv: s.index != nil
-
- return s.index, nil
-}
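getIndex above caches the index behind a mutex and reloads it once the expiry passes. The same pattern in isolation might look like the sketch below; the names are invented, and the 15-minute TTL simply mirrors the refresh period used above.

package main

import (
	"fmt"
	"sync"
	"time"
)

// expiring holds a lazily loaded value that is refreshed after ttl elapses.
type expiring[T any] struct {
	mu      sync.Mutex
	value   *T
	expires time.Time
	ttl     time.Duration
	load    func() (*T, error)
}

func (e *expiring[T]) get() (*T, error) {
	e.mu.Lock()
	defer e.mu.Unlock()
	// value == nil implies expires is the zero time, so the first condition
	// is redundant, but it makes the postcondition obvious (as in getIndex).
	if e.value == nil || time.Now().After(e.expires) {
		v, err := e.load()
		if err != nil {
			return nil, err
		}
		e.value = v
		e.expires = time.Now().Add(e.ttl)
	}
	return e.value, nil
}

func main() {
	loads := 0
	idx := &expiring[string]{
		ttl: 15 * time.Minute,
		load: func() (*string, error) {
			loads++
			s := fmt.Sprintf("index read %d", loads)
			return &s, nil
		},
	}
	a, _ := idx.get()
	b, _ := idx.get() // within the TTL, so this is served from the cache
	fmt.Println(*a, "/", *b, "/ loads:", loads)
}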