internal/lsp: use dependencies in cache keys

This change includes the cache keys of a package's dependencies in the
cache key for its CheckPackageHandle, so editing a dependency now
produces a new key for every package that imports it. This should fix
the issue with propagating results to reverse dependencies.

Updates golang/go#34410

Change-Id: I025b7f0d6b0dcaa89c3461ebd94eadd35de4955f
Reviewed-on: https://go-review.googlesource.com/c/tools/+/198317
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Ian Cottrell <iancottrell@google.com>
Rebecca Stambler 2019-10-01 15:21:06 -04:00 committed by Brad Fitzpatrick
parent f7bb6f12f0
commit 9901302c21
9 changed files with 217 additions and 177 deletions
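
The shape of the new key computation in buildKey can be pictured with a small, self-contained sketch: a package's key hashes its own inputs together with the recursively computed keys of its sorted dependencies, so an edit anywhere in the import graph yields a fresh key for every reverse dependency. The types and helpers below are illustrative only, not the gopls API.

package main

import (
	"crypto/sha256"
	"fmt"
	"sort"
)

// pkgInfo is a toy stand-in for the package data a checkPackageHandle
// carries; the real handle holds ParseGoHandles, a *packages.Config, etc.
type pkgInfo struct {
	id    string
	files []string            // stand-ins for per-file content hashes
	deps  map[string]*pkgInfo // dependencies, keyed by ID
}

// key hashes the package ID, its file hashes, and the keys of its
// dependencies in sorted order, so an edit anywhere in the transitive
// dependency graph changes this package's key as well.
func key(p *pkgInfo) [sha256.Size]byte {
	h := sha256.New()
	h.Write([]byte(p.id))
	for _, f := range p.files {
		h.Write([]byte(f))
	}
	ids := make([]string, 0, len(p.deps))
	for id := range p.deps {
		ids = append(ids, id)
	}
	sort.Strings(ids) // deterministic ordering, as buildKey does for m.deps
	for _, id := range ids {
		k := key(p.deps[id])
		h.Write(k[:])
	}
	var out [sha256.Size]byte
	copy(out[:], h.Sum(nil))
	return out
}

func main() {
	dep := &pkgInfo{id: "example.com/dep", files: []string{"hash(a.go)"}}
	top := &pkgInfo{
		id:    "example.com/top",
		files: []string{"hash(main.go)"},
		deps:  map[string]*pkgInfo{dep.id: dep},
	}
	before := key(top)
	dep.files[0] = "hash(a.go, edited)" // edit the dependency only
	after := key(top)
	fmt.Println("top-level key changed:", before != after) // true
}

Because the memoized type-check result is stored under this key, a dependency edit misses the cache for every package that transitively imports it; no explicit cross-package invalidation is required.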


@@ -7,9 +7,11 @@ package cache
import (
"bytes"
"context"
"fmt"
"go/ast"
"go/scanner"
"go/types"
"sort"
"sync"
"golang.org/x/tools/go/analysis"
@@ -17,7 +19,6 @@ import (
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/memoize"
"golang.org/x/tools/internal/telemetry/log"
"golang.org/x/tools/internal/telemetry/trace"
errors "golang.org/x/xerrors"
)
@@ -25,7 +26,6 @@ import (
type importer struct {
snapshot *snapshot
ctx context.Context
config *packages.Config
// seen maintains the set of previously imported packages.
// If we have seen a package that is already in this map, we have a circular import.
@@ -41,38 +41,31 @@ type importer struct {
parentCheckPackageHandle *checkPackageHandle
}
// checkPackageKey uniquely identifies a package and its config.
type checkPackageKey struct {
id string
files string
config string
// TODO: For now, we don't include dependencies in the key.
// This will be necessary when we change the cache invalidation logic.
}
// checkPackageHandle implements source.CheckPackageHandle.
type checkPackageHandle struct {
handle *memoize.Handle
files []source.ParseGoHandle
imports map[packagePath]*checkPackageHandle
// files are the ParseGoHandles that compose the package.
files []source.ParseGoHandle
m *metadata
config *packages.Config
// mode is the mode in which the files were parsed.
mode source.ParseMode
// imports is the map of the package's imports.
imports map[packagePath]packageID
// m is the metadata associated with the package.
m *metadata
// key is the hashed key for the package.
key []byte
}
func (cph *checkPackageHandle) Mode() source.ParseMode {
if len(cph.files) == 0 {
return -1
func (cph *checkPackageHandle) packageKey() packageKey {
return packageKey{
id: cph.m.id,
mode: cph.mode,
}
mode := cph.files[0].Mode()
for _, ph := range cph.files[1:] {
if ph.Mode() != mode {
return -1
}
}
return mode
}
// checkPackageData contains the data produced by type-checking a package.
@@ -84,38 +77,82 @@ type checkPackageData struct {
}
// checkPackageHandle returns a source.CheckPackageHandle for a given package and config.
func (imp *importer) checkPackageHandle(ctx context.Context, id packageID, s *snapshot) (*checkPackageHandle, error) {
m := s.getMetadata(id)
func (imp *importer) checkPackageHandle(ctx context.Context, id packageID) (*checkPackageHandle, error) {
// Determine the mode that the files should be parsed in.
mode := imp.mode(id)
// Check if we already have this CheckPackageHandle cached.
if cph := imp.snapshot.getPackage(id, mode); cph != nil {
return cph, nil
}
// Build the CheckPackageHandle for this ID and its dependencies.
cph, err := imp.buildKey(ctx, id, mode)
if err != nil {
return nil, err
}
h := imp.snapshot.view.session.cache.store.Bind(string(cph.key), func(ctx context.Context) interface{} {
data := &checkPackageData{}
data.pkg, data.err = imp.typeCheck(ctx, cph)
return data
})
cph.handle = h
return cph, nil
}
// buildKey computes the checkPackageKey for a given checkPackageHandle.
func (imp *importer) buildKey(ctx context.Context, id packageID, mode source.ParseMode) (*checkPackageHandle, error) {
m := imp.snapshot.getMetadata(id)
if m == nil {
return nil, errors.Errorf("no metadata for %s", id)
}
phs, err := imp.parseGoHandles(ctx, m)
phs, err := imp.parseGoHandles(ctx, m, mode)
if err != nil {
log.Error(ctx, "no ParseGoHandles", err, telemetry.Package.Of(id))
return nil, err
}
cph := &checkPackageHandle{
m: m,
files: phs,
config: imp.config,
imports: make(map[packagePath]*checkPackageHandle),
imports: make(map[packagePath]packageID),
mode: mode,
}
h := imp.snapshot.view.session.cache.store.Bind(cph.key(), func(ctx context.Context) interface{} {
data := &checkPackageData{}
data.pkg, data.err = imp.typeCheck(ctx, cph, m)
return data
// Make sure all of the deps are sorted.
deps := append([]packageID{}, m.deps...)
sort.Slice(deps, func(i, j int) bool {
return deps[i] < deps[j]
})
cph.handle = h
// Create the dep importer for use on the dependency handles.
depImporter := &importer{
snapshot: imp.snapshot,
topLevelPackageID: imp.topLevelPackageID,
}
// Begin computing the key by getting the depKeys for all dependencies.
var depKeys [][]byte
for _, dep := range deps {
depHandle, err := depImporter.checkPackageHandle(ctx, dep)
if err != nil {
return nil, errors.Errorf("no dep handle for %s: %+v", dep, err)
}
cph.imports[depHandle.m.pkgPath] = depHandle.m.id
depKeys = append(depKeys, depHandle.key)
}
cph.key = checkPackageKey(cph.m.id, cph.files, m.config, depKeys)
// Cache the CheckPackageHandle in the snapshot.
for _, ph := range cph.files {
uri := ph.File().Identity().URI
s.addPackage(uri, cph)
}
imp.snapshot.addPackage(cph)
return cph, nil
}
func checkPackageKey(id packageID, phs []source.ParseGoHandle, cfg *packages.Config, deps [][]byte) []byte {
return []byte(hashContents([]byte(fmt.Sprintf("%s%s%s%s", id, hashParseKeys(phs), hashConfig(cfg), hashContents(bytes.Join(deps, nil))))))
}
// hashConfig returns the hash for the *packages.Config.
func hashConfig(config *packages.Config) string {
b := bytes.NewBuffer(nil)
@@ -143,16 +180,12 @@ func (cph *checkPackageHandle) check(ctx context.Context) (*pkg, error) {
v := cph.handle.Get(ctx)
if v == nil {
return nil, ctx.Err()
return nil, errors.Errorf("no package for %s", cph.m.id)
}
data := v.(*checkPackageData)
return data.pkg, data.err
}
func (cph *checkPackageHandle) Config() *packages.Config {
return cph.config
}
func (cph *checkPackageHandle) Files() []source.ParseGoHandle {
return cph.files
}
@@ -182,15 +215,7 @@ func (cph *checkPackageHandle) cached(ctx context.Context) (*pkg, error) {
return data.pkg, data.err
}
func (cph *checkPackageHandle) key() checkPackageKey {
return checkPackageKey{
id: string(cph.m.id),
files: hashParseKeys(cph.files),
config: hashConfig(cph.config),
}
}
func (imp *importer) parseGoHandles(ctx context.Context, m *metadata) ([]source.ParseGoHandle, error) {
func (imp *importer) parseGoHandles(ctx context.Context, m *metadata, mode source.ParseMode) ([]source.ParseGoHandle, error) {
phs := make([]source.ParseGoHandle, 0, len(m.files))
for _, uri := range m.files {
f, err := imp.snapshot.view.GetFile(ctx, uri)
@@ -198,15 +223,18 @@ func (imp *importer) parseGoHandles(ctx context.Context, m *metadata) ([]source.
return nil, err
}
fh := imp.snapshot.Handle(ctx, f)
mode := source.ParseExported
if imp.topLevelPackageID == m.id {
mode = source.ParseFull
}
phs = append(phs, imp.snapshot.view.session.cache.ParseGoHandle(fh, mode))
}
return phs, nil
}
func (imp *importer) mode(id packageID) source.ParseMode {
if imp.topLevelPackageID == id {
return source.ParseFull
}
return source.ParseExported
}
func (imp *importer) Import(pkgPath string) (*types.Package, error) {
ctx, done := trace.StartSpan(imp.ctx, "cache.importer.Import", telemetry.PackagePath.Of(pkgPath))
defer done()
@@ -215,16 +243,14 @@ func (imp *importer) Import(pkgPath string) (*types.Package, error) {
if imp.parentPkg == nil {
return nil, errors.Errorf("no parent package for import %s", pkgPath)
}
// Get the CheckPackageHandle from the importing package.
cph, ok := imp.parentCheckPackageHandle.imports[packagePath(pkgPath)]
id, ok := imp.parentCheckPackageHandle.imports[packagePath(pkgPath)]
if !ok {
return nil, errors.Errorf("no package data for import path %s", pkgPath)
}
for _, ph := range cph.Files() {
if ph.Mode() != source.ParseExported {
panic("dependency parsed in full mode")
}
cph := imp.snapshot.getPackage(id, source.ParseExported)
if cph == nil {
return nil, errors.Errorf("no package for %s", id)
}
pkg, err := cph.check(ctx)
if err != nil {
@@ -234,17 +260,17 @@ func (imp *importer) Import(pkgPath string) (*types.Package, error) {
return pkg.GetTypes(), nil
}
func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle, m *metadata) (*pkg, error) {
ctx, done := trace.StartSpan(ctx, "cache.importer.typeCheck", telemetry.Package.Of(m.id))
func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle) (*pkg, error) {
ctx, done := trace.StartSpan(ctx, "cache.importer.typeCheck", telemetry.Package.Of(cph.m.id))
defer done()
pkg := &pkg{
view: imp.snapshot.view,
id: m.id,
pkgPath: m.pkgPath,
id: cph.m.id,
pkgPath: cph.m.pkgPath,
files: cph.Files(),
imports: make(map[packagePath]*pkg),
typesSizes: m.typesSizes,
typesSizes: cph.m.typesSizes,
typesInfo: &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
@@ -257,22 +283,8 @@ func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle, m *
}
// If the package comes back with errors from `go list`,
// don't bother type-checking it.
for _, err := range m.errors {
pkg.errors = append(m.errors, err)
}
// Set imports of package to correspond to cached packages.
cimp := imp.child(ctx, pkg, cph)
for _, depID := range m.deps {
dep := imp.snapshot.getMetadata(depID)
if dep == nil {
continue
}
depHandle, err := cimp.checkPackageHandle(ctx, depID, imp.snapshot)
if err != nil {
log.Error(ctx, "no check package handle", err, telemetry.Package.Of(depID))
continue
}
cph.imports[dep.pkgPath] = depHandle
for _, err := range cph.m.errors {
pkg.errors = append(cph.m.errors, err)
}
var (
files = make([]*ast.File, len(pkg.files))
@@ -305,19 +317,19 @@ func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle, m *
files = files[:i]
// Use the default type information for the unsafe package.
if m.pkgPath == "unsafe" {
if cph.m.pkgPath == "unsafe" {
pkg.types = types.Unsafe
} else if len(files) == 0 { // not the unsafe package, no parsed files
return nil, errors.Errorf("no parsed files for package %s", pkg.pkgPath)
} else {
pkg.types = types.NewPackage(string(m.pkgPath), m.name)
pkg.types = types.NewPackage(string(cph.m.pkgPath), cph.m.name)
}
cfg := &types.Config{
Error: func(err error) {
imp.snapshot.view.session.cache.appendPkgError(pkg, err)
},
Importer: cimp,
Importer: imp.depImporter(ctx, cph, pkg),
}
check := types.NewChecker(cfg, imp.snapshot.view.session.cache.FileSet(), pkg.types, pkg.typesInfo)
@@ -327,7 +339,7 @@ func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle, m *
return pkg, nil
}
func (imp *importer) child(ctx context.Context, pkg *pkg, cph *checkPackageHandle) *importer {
func (imp *importer) depImporter(ctx context.Context, cph *checkPackageHandle, pkg *pkg) *importer {
// Handle circular imports by copying previously seen imports.
seen := make(map[packageID]struct{})
for k, v := range imp.seen {
@@ -336,12 +348,11 @@ func (imp *importer) child(ctx context.Context, pkg *pkg, cph *checkPackageHandl
seen[pkg.id] = struct{}{}
return &importer{
snapshot: imp.snapshot,
ctx: ctx,
config: imp.config,
seen: seen,
topLevelPackageID: imp.topLevelPackageID,
parentPkg: pkg,
parentCheckPackageHandle: cph,
seen: seen,
ctx: ctx,
}
}
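
The handle created in checkPackageHandle above is bound in the session's cache store under the key computed by buildKey, so whether a package gets re-type-checked reduces to whether that key changed. A toy string-keyed memoizing store (an assumption for illustration, not the real internal/memoize API) shows the effect:

package main

import "fmt"

// store memoizes one result per key, loosely mirroring how the session's
// cache binds the type-check function to a checkPackageHandle's key.
type store struct {
	results map[string]string
}

// bind returns the cached value for key if one exists; otherwise it runs
// compute once and caches the result under that key.
func (s *store) bind(key string, compute func() string) string {
	if v, ok := s.results[key]; ok {
		return v // unchanged key: reuse the earlier result
	}
	v := compute()
	s.results[key] = v
	return v
}

func main() {
	s := &store{results: make(map[string]string)}
	runs := 0
	typeCheck := func() string {
		runs++
		return fmt.Sprintf("type-checked (run %d)", runs)
	}
	fmt.Println(s.bind("key-v1", typeCheck)) // computes: run 1
	fmt.Println(s.bind("key-v1", typeCheck)) // cached: still run 1
	// A dependency edit changes the hashed key, so the next lookup misses
	// the cache and the package is type-checked again.
	fmt.Println(s.bind("key-v2", typeCheck)) // computes: run 2
}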


@@ -21,6 +21,9 @@ func (v *view) CheckPackageHandles(ctx context.Context, f source.File) (source.S
if err != nil {
return nil, nil, err
}
if len(cphs) == 0 {
return nil, nil, errors.Errorf("no CheckPackageHandles for %s", f.URI())
}
return s, cphs, nil
}
@@ -31,13 +34,12 @@ func (s *snapshot) checkPackageHandles(ctx context.Context, f source.File) ([]so
// Determine if we need to type-check the package.
m, cphs, load, check := s.shouldCheck(fh)
cfg := s.view.Config(ctx)
// We may need to re-load package metadata.
// We only need to do this if it has been invalidated, and is therefore unavailable.
if load {
var err error
m, err = s.load(ctx, f.URI(), cfg)
m, err = s.load(ctx, f.URI())
if err != nil {
return nil, err
}
@@ -51,12 +53,11 @@ func (s *snapshot) checkPackageHandles(ctx context.Context, f source.File) ([]so
var results []source.CheckPackageHandle
for _, m := range m {
imp := &importer{
config: cfg,
seen: make(map[packageID]struct{}),
topLevelPackageID: m.id,
snapshot: s,
topLevelPackageID: m.id,
seen: make(map[packageID]struct{}),
}
cph, err := imp.checkPackageHandle(ctx, m.id, s)
cph, err := imp.checkPackageHandle(ctx, m.id)
if err != nil {
return nil, err
}
@@ -64,9 +65,6 @@ func (s *snapshot) checkPackageHandles(ctx context.Context, f source.File) ([]so
}
cphs = results
}
if len(cphs) == 0 {
return nil, errors.Errorf("no CheckPackageHandles for %s", f.URI())
}
return cphs, nil
}
@@ -90,26 +88,8 @@ func (s *snapshot) shouldCheck(fh source.FileHandle) (m []*metadata, cphs []sour
}
// We expect to see a checked package for each package ID,
// and it should be parsed in full mode.
var (
expected = len(m)
cachedCPHs = s.getPackages(fh.Identity().URI)
)
if len(cachedCPHs) < expected {
return m, nil, load, true
}
for _, cph := range cachedCPHs {
// The package may have been checked in the exported mode.
if cph.Mode() != source.ParseFull {
continue
}
// Confirm that the file belongs to this package.
for _, file := range cph.Files() {
if file.File().Identity() == fh.Identity() {
cphs = append(cphs, cph)
}
}
}
if len(cphs) < expected {
cphs = s.getPackages(fh.Identity().URI, source.ParseFull)
if len(cphs) < len(m) {
return m, nil, load, true
}
return m, cphs, load, check


@@ -33,12 +33,16 @@ type metadata struct {
errors []packages.Error
deps []packageID
missingDeps map[packagePath]struct{}
// config is the *packages.Config associated with the loaded package.
config *packages.Config
}
func (s *snapshot) load(ctx context.Context, uri span.URI, cfg *packages.Config) ([]*metadata, error) {
func (s *snapshot) load(ctx context.Context, uri span.URI) ([]*metadata, error) {
ctx, done := trace.StartSpan(ctx, "cache.view.load", telemetry.URI.Of(uri))
defer done()
cfg := s.view.Config(ctx)
pkgs, err := packages.Load(cfg, fmt.Sprintf("file=%s", uri.Filename()))
log.Print(ctx, "go/packages.Load", tag.Of("packages", len(pkgs)))
@@ -49,7 +53,7 @@ func (s *snapshot) load(ctx context.Context, uri span.URI, cfg *packages.Config)
// Return this error as a diagnostic to the user.
return nil, err
}
m, prevMissingImports, err := s.updateMetadata(ctx, uri, pkgs)
m, prevMissingImports, err := s.updateMetadata(ctx, uri, pkgs, cfg)
if err != nil {
return nil, err
}
@@ -103,12 +107,14 @@ func (c *cache) shouldLoad(ctx context.Context, s *snapshot, originalFH, current
// Get the original parsed file in order to check package name and imports.
original, _, _, err := c.ParseGoHandle(originalFH, source.ParseHeader).Parse(ctx)
if err != nil {
log.Error(ctx, "no ParseGoHandle for original FileHandle", err, telemetry.URI.Of(originalFH.Identity().URI))
return false
}
// Get the current parsed file in order to check package name and imports.
current, _, _, err := c.ParseGoHandle(currentFH, source.ParseHeader).Parse(ctx)
if err != nil {
log.Error(ctx, "no ParseGoHandle for current FileHandle", err, telemetry.URI.Of(currentFH.Identity().URI))
return false
}
@@ -133,7 +139,7 @@ func (c *cache) shouldLoad(ctx context.Context, s *snapshot, originalFH, current
return false
}
func (s *snapshot) updateMetadata(ctx context.Context, uri span.URI, pkgs []*packages.Package) ([]*metadata, map[packageID]map[packagePath]struct{}, error) {
func (s *snapshot) updateMetadata(ctx context.Context, uri span.URI, pkgs []*packages.Package, cfg *packages.Config) ([]*metadata, map[packageID]map[packagePath]struct{}, error) {
// Clear metadata since we are re-running go/packages.
prevMissingImports := make(map[packageID]map[packagePath]struct{})
m := s.getMetadataForURI(uri)
@@ -149,7 +155,7 @@ func (s *snapshot) updateMetadata(ctx context.Context, uri span.URI, pkgs []*pac
log.Print(ctx, "go/packages.Load", tag.Of("package", pkg.PkgPath), tag.Of("files", pkg.CompiledGoFiles))
// Set the metadata for this package.
if err := s.updateImports(ctx, packagePath(pkg.PkgPath), pkg); err != nil {
if err := s.updateImports(ctx, packagePath(pkg.PkgPath), pkg, cfg); err != nil {
return nil, nil, err
}
m := s.getMetadata(packageID(pkg.ID))
@@ -167,7 +173,7 @@ func (s *snapshot) updateMetadata(ctx context.Context, uri span.URI, pkgs []*pac
return results, prevMissingImports, nil
}
func (s *snapshot) updateImports(ctx context.Context, pkgPath packagePath, pkg *packages.Package) error {
func (s *snapshot) updateImports(ctx context.Context, pkgPath packagePath, pkg *packages.Package, cfg *packages.Config) error {
// Recreate the metadata rather than reusing it to avoid locking.
m := &metadata{
id: packageID(pkg.ID),
@@ -175,6 +181,7 @@ func (s *snapshot) updateImports(ctx context.Context, pkgPath packagePath, pkg *
name: pkg.Name,
typesSizes: pkg.TypesSizes,
errors: pkg.Errors,
config: cfg,
}
for _, filename := range pkg.CompiledGoFiles {
uri := span.FileURI(filename)
@@ -205,7 +212,7 @@ func (s *snapshot) updateImports(ctx context.Context, pkgPath packagePath, pkg *
}
dep := s.getMetadata(importID)
if dep == nil {
if err := s.updateImports(ctx, importPkgPath, importPkg); err != nil {
if err := s.updateImports(ctx, importPkgPath, importPkg, cfg); err != nil {
log.Error(ctx, "error in dependency", err)
}
}


@@ -75,7 +75,7 @@ func (h *parseGoHandle) Mode() source.ParseMode {
func (h *parseGoHandle) Parse(ctx context.Context) (*ast.File, *protocol.ColumnMapper, error, error) {
v := h.handle.Get(ctx)
if v == nil {
return nil, nil, nil, ctx.Err()
return nil, nil, nil, errors.Errorf("no parsed file for %s", h.File().Identity().URI)
}
data := v.(*parseGoData)
return data.ast, data.mapper, data.parseError, data.err


@@ -208,11 +208,11 @@ func (p *pkg) SetDiagnostics(a *analysis.Analyzer, diags []source.Diagnostic) {
p.diagnostics[a] = diags
}
func (p *pkg) FindDiagnostic(pdiag protocol.Diagnostic) (*source.Diagnostic, error) {
p.diagMu.Lock()
defer p.diagMu.Unlock()
func (pkg *pkg) FindDiagnostic(pdiag protocol.Diagnostic) (*source.Diagnostic, error) {
pkg.diagMu.Lock()
defer pkg.diagMu.Unlock()
for a, diagnostics := range p.diagnostics {
for a, diagnostics := range pkg.diagnostics {
if a.Name != pdiag.Source {
continue
}


@@ -99,15 +99,17 @@ func (s *session) NewView(ctx context.Context, name string, folder span.URI, opt
filesByURI: make(map[span.URI]viewFile),
filesByBase: make(map[string][]viewFile),
snapshot: &snapshot{
packages: make(map[span.URI]map[packageKey]*checkPackageHandle),
ids: make(map[span.URI][]packageID),
metadata: make(map[packageID]*metadata),
files: make(map[span.URI]source.FileHandle),
packages: make(map[packageKey]*checkPackageHandle),
ids: make(map[span.URI][]packageID),
metadata: make(map[packageID]*metadata),
files: make(map[span.URI]source.FileHandle),
importedBy: make(map[packageID][]packageID),
},
ignoredURIs: make(map[span.URI]struct{}),
builtin: &builtinPkg{},
}
v.snapshot.view = v
v.analyzers = UpdateAnalyzers(v, defaultAnalyzers)
// Preemptively build the builtin package,
// so we immediately add builtin.go to the list of ignored files.
@@ -236,7 +238,7 @@ func (s *session) DidOpen(ctx context.Context, uri span.URI, kind source.FileKin
// A file may be in multiple views.
for _, view := range s.views {
if strings.HasPrefix(string(uri), string(view.Folder())) {
view.invalidateMetadata(uri)
view.invalidateMetadata(ctx, uri)
}
}
}
@@ -350,7 +352,7 @@ func (s *session) DidChangeOutOfBand(ctx context.Context, uri span.URI, changeTy
// force a go/packages invocation to refresh the package's file list.
views := s.viewsOf(uri)
for _, v := range views {
v.invalidateMetadata(uri)
v.invalidateMetadata(ctx, uri)
}
}
s.filesWatchMap.Notify(uri)


@@ -31,7 +31,7 @@ type snapshot struct {
// packages maps a package ID and parse mode to the CheckPackageHandle for that package.
// It may be invalidated when a file's content changes.
packages map[span.URI]map[packageKey]*checkPackageHandle
packages map[packageKey]*checkPackageHandle
}
func (s *snapshot) getImportedBy(id packageID) []packageID {
@@ -46,33 +46,49 @@ func (s *snapshot) getImportedBy(id packageID) []packageID {
return s.importedBy[id]
}
func (s *snapshot) addPackage(uri span.URI, cph *checkPackageHandle) {
func (s *snapshot) addPackage(cph *checkPackageHandle) {
s.mu.Lock()
defer s.mu.Unlock()
pkgs, ok := s.packages[uri]
if !ok {
pkgs = make(map[packageKey]*checkPackageHandle)
s.packages[uri] = pkgs
// TODO: We should make sure not to compute duplicate CheckPackageHandles,
// and instead panic here. This will be hard to do because we may encounter
// the same package multiple times in the dependency tree.
if _, ok := s.packages[cph.packageKey()]; ok {
return
}
// TODO: Check that there isn't already something set here.
// This can't be done until we fix the cache keys for CheckPackageHandles.
pkgs[packageKey{
id: cph.m.id,
mode: cph.Mode(),
}] = cph
s.packages[cph.packageKey()] = cph
}
func (s *snapshot) getPackages(uri span.URI) (cphs []source.CheckPackageHandle) {
func (s *snapshot) getPackages(uri span.URI, m source.ParseMode) (cphs []source.CheckPackageHandle) {
s.mu.Lock()
defer s.mu.Unlock()
for _, cph := range s.packages[uri] {
cphs = append(cphs, cph)
if ids, ok := s.ids[uri]; ok {
for _, id := range ids {
key := packageKey{
id: id,
mode: m,
}
cph, ok := s.packages[key]
if ok {
cphs = append(cphs, cph)
}
}
}
return cphs
}
func (s *snapshot) getPackage(id packageID, m source.ParseMode) *checkPackageHandle {
s.mu.Lock()
defer s.mu.Unlock()
key := packageKey{
id: id,
mode: m,
}
return s.packages[key]
}
func (s *snapshot) getMetadataForURI(uri span.URI) (metadata []*metadata) {
s.mu.Lock()
defer s.mu.Unlock()
@@ -89,6 +105,12 @@ func (s *snapshot) setMetadata(m *metadata) {
s.mu.Lock()
defer s.mu.Unlock()
// TODO: We should make sure not to set duplicate metadata,
// and instead panic here. This can be done by making sure not to
// reset metadata information for packages we've already seen.
if _, ok := s.metadata[m.id]; ok {
return
}
s.metadata[m.id] = m
}
@@ -103,6 +125,14 @@ func (s *snapshot) addID(uri span.URI, id packageID) {
s.mu.Lock()
defer s.mu.Unlock()
for _, existingID := range s.ids[uri] {
if existingID == id {
// TODO: We should make sure not to set duplicate IDs,
// and instead panic here. This can be done by making sure not to
// reset metadata information for packages we've already seen.
return
}
}
s.ids[uri] = append(s.ids[uri], id)
}
@@ -130,52 +160,67 @@ func (s *snapshot) Handle(ctx context.Context, f source.File) source.FileHandle
return s.files[f.URI()]
}
func (s *snapshot) clone(withoutURI span.URI, withoutTypes, withoutMetadata map[span.URI]struct{}) *snapshot {
func (s *snapshot) clone(ctx context.Context, withoutURI *span.URI, withoutTypes, withoutMetadata map[span.URI]struct{}) *snapshot {
s.mu.Lock()
defer s.mu.Unlock()
result := &snapshot{
id: s.id + 1,
view: s.view,
packages: make(map[span.URI]map[packageKey]*checkPackageHandle),
ids: make(map[span.URI][]packageID),
metadata: make(map[packageID]*metadata),
importedBy: make(map[packageID][]packageID),
metadata: make(map[packageID]*metadata),
packages: make(map[packageKey]*checkPackageHandle),
files: make(map[span.URI]source.FileHandle),
}
// Copy all of the FileHandles except for the one that was invalidated.
for k, v := range s.files {
if k == withoutURI {
if withoutURI != nil && k == *withoutURI {
continue
}
result.files[k] = v
}
for k, v := range s.packages {
// Collect the IDs for the packages associated with the excluded URIs.
withoutMetadataIDs := make(map[packageID]struct{})
withoutTypesIDs := make(map[packageID]struct{})
for k, ids := range s.ids {
// Map URIs to IDs for exclusion.
if withoutTypes != nil {
if _, ok := withoutTypes[k]; ok {
continue
for _, id := range ids {
withoutTypesIDs[id] = struct{}{}
}
}
}
result.packages[k] = v
}
withoutIDs := make(map[packageID]struct{})
for k, ids := range s.ids {
if withoutMetadata != nil {
if _, ok := withoutMetadata[k]; ok {
for _, id := range ids {
withoutIDs[id] = struct{}{}
withoutMetadataIDs[id] = struct{}{}
}
continue
}
}
result.ids[k] = ids
}
// Copy the package type information.
for k, v := range s.packages {
if _, ok := withoutTypesIDs[k.id]; ok {
continue
}
if _, ok := withoutMetadataIDs[k.id]; ok {
continue
}
result.packages[k] = v
}
// Copy the package metadata.
for k, v := range s.metadata {
if _, ok := withoutIDs[k]; ok {
if _, ok := withoutMetadataIDs[k]; ok {
continue
}
result.metadata[k] = v
}
// Don't bother copying the importedBy graph,
// as it changes each time we update metadata.
return result
}
@@ -210,8 +255,7 @@ func (v *view) invalidateContent(ctx context.Context, uri span.URI, kind source.
// TODO: If a package's name has changed,
// we should invalidate the metadata for the new package name (if it exists).
}
v.snapshot = v.snapshot.clone(uri, withoutTypes, withoutMetadata)
v.snapshot = v.snapshot.clone(ctx, &uri, withoutTypes, withoutMetadata)
}
// invalidateMetadata invalidates package metadata for all files in f's
@@ -220,15 +264,16 @@ func (v *view) invalidateContent(ctx context.Context, uri span.URI, kind source.
//
// TODO: This function shouldn't be necessary.
// We should be able to handle its use cases more efficiently.
func (v *view) invalidateMetadata(uri span.URI) {
func (v *view) invalidateMetadata(ctx context.Context, uri span.URI) {
v.snapshotMu.Lock()
defer v.snapshotMu.Unlock()
withoutMetadata := make(map[span.URI]struct{})
for _, id := range v.snapshot.getIDs(uri) {
v.snapshot.reverseDependencies(id, withoutMetadata, map[packageID]struct{}{})
}
v.snapshot = v.snapshot.clone(uri, nil, withoutMetadata)
v.snapshot = v.snapshot.clone(ctx, nil, withoutMetadata, withoutMetadata)
}
// reverseDependencies populates the uris map with file URIs belonging to the
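
The rewritten clone above moves from URI-keyed to ID-keyed invalidation: the affected URIs are first mapped to package IDs via s.ids, and any cached package or metadata entry for those IDs is omitted from the new snapshot. A simplified model of that flow, with field names and types reduced for illustration:

package main

import "fmt"

type packageID string

// snapshot models the ID-keyed caches that clone copies; the real snapshot
// also tracks per-mode package keys, file handles, and an importedBy graph.
type snapshot struct {
	ids      map[string][]packageID // file URI -> package IDs
	packages map[packageID]string   // cached type-check results (simplified)
	metadata map[packageID]string   // cached go/packages metadata (simplified)
}

// clone copies the snapshot, dropping type information for packages whose
// files are in withoutTypes, and both metadata and type information for
// packages whose files are in withoutMetadata.
func (s *snapshot) clone(withoutTypes, withoutMetadata map[string]bool) *snapshot {
	droppedTypes := make(map[packageID]bool)
	droppedMeta := make(map[packageID]bool)
	for uri, ids := range s.ids {
		for _, id := range ids {
			if withoutTypes[uri] {
				droppedTypes[id] = true
			}
			if withoutMetadata[uri] {
				droppedMeta[id] = true
			}
		}
	}
	result := &snapshot{
		ids:      make(map[string][]packageID),
		packages: make(map[packageID]string),
		metadata: make(map[packageID]string),
	}
	for uri, ids := range s.ids {
		if !withoutMetadata[uri] {
			result.ids[uri] = ids
		}
	}
	for id, v := range s.packages {
		if !droppedTypes[id] && !droppedMeta[id] {
			result.packages[id] = v
		}
	}
	for id, v := range s.metadata {
		if !droppedMeta[id] {
			result.metadata[id] = v
		}
	}
	return result
}

func main() {
	s := &snapshot{
		ids:      map[string][]packageID{"a.go": {"pkg/a"}, "b.go": {"pkg/b"}},
		packages: map[packageID]string{"pkg/a": "checked", "pkg/b": "checked"},
		metadata: map[packageID]string{"pkg/a": "meta", "pkg/b": "meta"},
	}
	// Editing a.go drops pkg/a's type information but keeps its metadata.
	next := s.clone(map[string]bool{"a.go": true}, nil)
	fmt.Println(len(next.packages), len(next.metadata)) // 1 2
}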


@@ -22,6 +22,7 @@ import (
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/span"
"golang.org/x/tools/internal/telemetry/log"
"golang.org/x/tools/internal/xcontext"
errors "golang.org/x/xerrors"
)
@@ -353,6 +354,7 @@ func (v *view) getFile(ctx context.Context, uri span.URI, kind source.FileKind)
kind: source.Go,
}
v.session.filesWatchMap.Watch(uri, func() {
ctx := xcontext.Detach(ctx)
v.invalidateContent(ctx, uri, kind)
})
v.mapFile(uri, f)
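
The xcontext.Detach call added above hands the file-watch callback a context that keeps the original request's values but not its cancellation, since the callback may fire long after that request has finished. A minimal illustration of the idea (a hypothetical stand-in, not the internal/xcontext implementation):

package main

import (
	"context"
	"fmt"
	"time"
)

// detached keeps its parent's values but drops the parent's cancellation
// and deadline, which is roughly what a Detach helper does for callbacks
// that outlive the request that registered them.
type detached struct{ parent context.Context }

func (d detached) Deadline() (time.Time, bool)       { return time.Time{}, false }
func (d detached) Done() <-chan struct{}             { return nil }
func (d detached) Err() error                        { return nil }
func (d detached) Value(key interface{}) interface{} { return d.parent.Value(key) }

func main() {
	type ctxKey string
	parent, cancel := context.WithCancel(
		context.WithValue(context.Background(), ctxKey("request"), "42"))
	cancel() // the originating request is long gone when the watch fires

	var ctx context.Context = detached{parent: parent}
	fmt.Println(parent.Err())                 // context.Canceled
	fmt.Println(ctx.Err())                    // <nil>: the callback keeps running
	fmt.Println(ctx.Value(ctxKey("request"))) // "42": values still flow through
}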


@@ -105,13 +105,6 @@ type CheckPackageHandle interface {
// ParseGoHandle returns a ParseGoHandle for which to get the package.
Files() []ParseGoHandle
// Config is the *packages.Config that the package metadata was loaded with.
Config() *packages.Config
// Mode returns the ParseMode for all of the files in the CheckPackageHandle.
// The files should always have the same parse mode.
Mode() ParseMode
// Check returns the type-checked Package for the CheckPackageHandle.
Check(ctx context.Context) (Package, error)