diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0d7ca6a0..0fc0bff5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,14 @@ All notable changes to `lsif-go` are documented in this file.
## Unreleased changes
+### Fixed
+
+- Many issues relating to package declarations, imports and structs have been fixed.
+ - See [Package Declarations](./docs/package_declarations.md)
+ - See [Imports](./docs/imports.md)
+ - See [Structs](./docs/structs.md)
+- Additionally, package declarations are now indexed.
+
## v1.6.7
### Fixed
diff --git a/docs/examples/imports/.gitignore b/docs/examples/imports/.gitignore
new file mode 100644
index 00000000..ce7ce26d
--- /dev/null
+++ b/docs/examples/imports/.gitignore
@@ -0,0 +1 @@
+dump.lsif
diff --git a/docs/examples/imports/dump.svg b/docs/examples/imports/dump.svg
new file mode 100644
index 00000000..f7e59982
--- /dev/null
+++ b/docs/examples/imports/dump.svg
@@ -0,0 +1,839 @@
+
+
+
+
+
diff --git a/docs/examples/imports/gen.sh b/docs/examples/imports/gen.sh
new file mode 100755
index 00000000..f4f0c2ea
--- /dev/null
+++ b/docs/examples/imports/gen.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -e
+set -x
+
+lsif-go-imports
+
+lsif-visualize dump.lsif \
+ --exclude=sourcegraph:documentationResult \
+ --exclude=hoverResult \
+ | dot -Tsvg > dump.svg
diff --git a/docs/examples/imports/go.mod b/docs/examples/imports/go.mod
new file mode 100644
index 00000000..54e09bdb
--- /dev/null
+++ b/docs/examples/imports/go.mod
@@ -0,0 +1,3 @@
+module imports
+
+go 1.16
diff --git a/docs/examples/imports/main.go b/docs/examples/imports/main.go
new file mode 100644
index 00000000..d1149334
--- /dev/null
+++ b/docs/examples/imports/main.go
@@ -0,0 +1,17 @@
+package main
+
+import (
+ "fmt"
+ . "net/http"
+ s "sort"
+)
+
+func Main() {
+ sortedStrings := []string{"hello", "world", "!"}
+
+ // s -> sort
+ s.Strings(sortedStrings)
+
+ // http.CanonicalHeaderKey -> CanonicalHeaderKey
+ fmt.Println(CanonicalHeaderKey(sortedStrings[0]))
+}
diff --git a/docs/examples/smollest/.gitignore b/docs/examples/smollest/.gitignore
new file mode 100644
index 00000000..ce7ce26d
--- /dev/null
+++ b/docs/examples/smollest/.gitignore
@@ -0,0 +1 @@
+dump.lsif
diff --git a/docs/examples/smollest/dump.svg b/docs/examples/smollest/dump.svg
new file mode 100644
index 00000000..73604ebe
--- /dev/null
+++ b/docs/examples/smollest/dump.svg
@@ -0,0 +1,137 @@
+
+
+
+
+
diff --git a/docs/examples/smollest/gen.sh b/docs/examples/smollest/gen.sh
new file mode 100755
index 00000000..66abf563
--- /dev/null
+++ b/docs/examples/smollest/gen.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -e
+set -x
+
+lsif-go
+
+lsif-visualize dump.lsif \
+ --exclude=sourcegraph:documentationResult \
+ --exclude=hoverResult \
+ | dot -Tsvg > dump.svg
diff --git a/docs/examples/smollest/go.mod b/docs/examples/smollest/go.mod
new file mode 100644
index 00000000..61095f63
--- /dev/null
+++ b/docs/examples/smollest/go.mod
@@ -0,0 +1,3 @@
+module smollest
+
+go 1.16
diff --git a/docs/examples/smollest/lib.go b/docs/examples/smollest/lib.go
new file mode 100644
index 00000000..f441647e
--- /dev/null
+++ b/docs/examples/smollest/lib.go
@@ -0,0 +1,2 @@
+// Hello world, this is a docstring. So we pick this file.
+package smollest
diff --git a/docs/examples/smollest/sub.go b/docs/examples/smollest/sub.go
new file mode 100644
index 00000000..8eb15c27
--- /dev/null
+++ b/docs/examples/smollest/sub.go
@@ -0,0 +1 @@
+package smollest
diff --git a/docs/imports.md b/docs/imports.md
new file mode 100644
index 00000000..f91aca0f
--- /dev/null
+++ b/docs/imports.md
@@ -0,0 +1,65 @@
+# Imports
+
+There are two kinds of import statements in Go: plain and named. In both cases, we generate
+the same reference to the imported package itself by emitting an import moniker.
+
+```go
+import "fmt"
+// ^^^------ reference github.com/golang/go/std/fmt
+
+import f "fmt"
+// ^--------- local definition
+// ^^^---- reference github.com/golang/go/std/fmt
+
+
+// Special case: "." generates no local definition
+import . "fmt"
+// no local def
+// ^^^---- reference github.com/golang/go/std/fmt
+```
+
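+Internally, the reference for the import path is emitted roughly as follows. This is a trimmed-down
+sketch of `emitImportMonikerReference` in [indexer.go](/internal/indexer/indexer.go); hover
+emission and error handling are omitted:
+
+```go
+// For each ast.ImportSpec in a file, emit a range over the import path and
+// attach an import moniker (e.g. "github.com/golang/go/std/fmt") to it.
+pos := spec.Path.Pos()
+obj := types.NewPkgName(pos, p.Types, spec.Path.Value, pkg.Types)
+
+position, document, _ := i.positionAndDocument(p, pos)
+rangeID, _ := i.ensureRangeFor(position, obj)
+
+i.emitImportMoniker(rangeID, p, obj, document)
+document.appendReference(rangeID)
+```
+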
+## Example
+
+So, given this kind of import block, you will see the following.
+
+```go
+import (
+ "fmt"
+ . "net/http"
+ s "sort"
+)
+```
+
+- Regular `"fmt"` import. Creates only a reference to the import moniker.
+
+
+
+- Named `s "sort"` import. Creates both a reference and a definition. Any local
+references to `s` in this case will link back to the definition of this import.
+`"sort"` will still link to the external package.
+
+
+
+
+
+- `.` import. This also creates only a reference, because `.` does not
+create a new definition; it just pulls the package's exported names into scope.
+
+
diff --git a/docs/media/fmt_import.png b/docs/media/fmt_import.png
new file mode 100644
index 00000000..f3197073
Binary files /dev/null and b/docs/media/fmt_import.png differ
diff --git a/docs/media/http_import.png b/docs/media/http_import.png
new file mode 100644
index 00000000..cb9228b7
Binary files /dev/null and b/docs/media/http_import.png differ
diff --git a/docs/media/s_definition.png b/docs/media/s_definition.png
new file mode 100644
index 00000000..439055dd
Binary files /dev/null and b/docs/media/s_definition.png differ
diff --git a/docs/media/sort_import.png b/docs/media/sort_import.png
new file mode 100644
index 00000000..be46e48c
Binary files /dev/null and b/docs/media/sort_import.png differ
diff --git a/docs/package_declarations.md b/docs/package_declarations.md
new file mode 100644
index 00000000..d9533cb1
--- /dev/null
+++ b/docs/package_declarations.md
@@ -0,0 +1,90 @@
+# Package Declarations
+
+
+In general, we have used `types.*` structs that satisfy the `types.Object`
+interface. However, there was no struct that represented the statement:
+
+```go
+package mypkg
+```
+
+That's because the majority of the information is held in `types.Package`
+and the corresponding definition in `packages.Package.Syntax`.
+
+Since there was no `types.PkgDeclaration` or similar available, we created our own.
+See [types.go](/internal/indexer/types.go).
+
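+The indexer now works in terms of an `ObjectLike` interface instead of `types.Object`, so both
+real `types.Object` values and our own `PkgDeclaration` can flow through the same code paths.
+Roughly, the shape is as follows (an illustrative sketch only, not the literal definitions):
+
+```go
+// ObjectLike is the subset of types.Object behavior that the indexer relies on.
+type ObjectLike interface {
+	Pos() token.Pos
+	Pkg() *types.Package
+	Name() string
+	Type() types.Type
+	Exported() bool
+}
+
+// PkgDeclaration represents a `package mypkg` clause, which has no types.Object.
+// Accessor methods (Pos, Pkg, Name, ...) are elided here.
+type PkgDeclaration struct {
+	pos  token.Pos
+	pkg  *types.Package
+	name string
+}
+```
+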
+## Definition vs. Reference
+
+We only emit one definition per package for the `package` declaration. The way we pick it is detailed
+in `findBestPackageDefinitionPath(...)`. For a given `package mypkg`, only the "best" declaration is
+picked as the definition; the others are all emitted as references. This makes sure that we
+always jump to the best package declaration when jumping between packages.
+
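+How the "best" file is chosen is a heuristic. Conceptually it looks something like the sketch
+below: prefer files that carry the package docstring, then break ties by how closely the file
+name matches the package name (which is what the `levenshtein` dependency is used for). The
+exact rules live in `findBestPackageDefinitionPath` in [indexer.go](/internal/indexer/indexer.go);
+the sketch is only an approximation.
+
+```go
+func bestPackageDefinitionPath(pkgName string, decls []DeclInfo) string {
+	// Prefer files that have a package docstring attached.
+	candidates := []DeclInfo{}
+	for _, d := range decls {
+		if d.HasDoc {
+			candidates = append(candidates, d)
+		}
+	}
+	if len(candidates) == 0 {
+		candidates = decls
+	}
+
+	// Among the candidates, pick the file whose name is closest to the package name.
+	best, bestDistance := "", math.MaxInt32
+	for _, d := range candidates {
+		base := strings.TrimSuffix(path.Base(d.Path), ".go")
+		if dist := levenshtein.ComputeDistance(pkgName, base); dist < bestDistance {
+			best, bestDistance = d.Path, dist
+		}
+	}
+	return best
+}
+```
+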
+For example, if we have a project that contains two files:
+- [lib.go](/docs/examples/smollest/lib.go)
+- [sub.go](/docs/examples/smollest/sub.go)
+
+In this case the project is literally just two package declarations. The LSIF graph
+will look like this (some nodes removed):
+
+
+
+NOTE: the two ranges point to the same resultSet, but only one of them
+(the range from `lib.go`) is chosen as the result for the definition
+request.
+
diff --git a/docs/structs.md b/docs/structs.md
new file mode 100644
index 00000000..031a462e
--- /dev/null
+++ b/docs/structs.md
@@ -0,0 +1,62 @@
+# Structs
+
+Structs are indexed in a relatively straightforward way.
+
+For example:
+
+```go
+type MyStruct struct {
+ Cli http.Client
+ ^^^----------------- definition MyStruct.Cli
+ ^^^^------------ reference github.com/golang/go/std/http
+ ^^^^^^----- reference github.com/golang/go/std/http.Client
+}
+
+```
+
+For anonymous (embedded) fields, however, it is a little more complicated, and ends up looking something like this:
+
+```go
+type NestedHandler struct {
+ LocalItem
+	^^^^^^^^^-------- definition NestedHandler.LocalItem
+ ^^^^^^^^^-------- reference LocalItem
+}
+```
+
+In this case the definition and the reference occupy exactly the same range, so `lsif-go`
+re-uses a single range for both.
+
+However, in the following case, we have three separate ranges that overlap but
+are not identical, so they cannot be shared and a new range must be created.
+
+```go
+type Nested struct {
+ http.Handler
+ ^^^^^^^^^^^^-------- definition Nested.Handler
+ ^^^^---------------- reference github.com/golang/go/std/http
+ ^^^^^^^-------- reference github.com/golang/go/std/http.Handler
+}
+```
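+
+Internally, the anonymous-field case is handled by `indexDefinitionForAnonymousField`
+(see [indexer.go](/internal/indexer/indexer.go)). A condensed sketch of its range logic:
+
+```go
+// startCol/endCol are 0-based columns covering the embedded field identifier.
+startCol := position.Column - 1
+endCol := p.Fset.Position(ident.End()).Column - 1
+
+var rangeID uint64
+if endCol-startCol == len(typVar.Name()) {
+	// e.g. `LocalItem`: the definition and the reference share a single range.
+	rangeID, _ = i.ensureRangeFor(position, typVar)
+} else {
+	// e.g. `http.Handler`: the definition range spans the qualified name, so it
+	// cannot be shared with the `http` and `Handler` reference ranges.
+	rangeID = i.emitter.EmitRange(
+		protocol.Pos{Line: position.Line - 1, Character: startCol},
+		protocol.Pos{Line: position.Line - 1, Character: endCol},
+	)
+}
+```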
diff --git a/go.mod b/go.mod
index 167d276f..82503cfe 100644
--- a/go.mod
+++ b/go.mod
@@ -3,6 +3,7 @@ module github.com/sourcegraph/lsif-go
go 1.15
require (
+ github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/alecthomas/kingpin v2.2.6+incompatible
github.com/efritz/pentimento v0.0.0-20190429011147-ade47d831101
github.com/google/go-cmp v0.5.6
diff --git a/go.sum b/go.sum
index ef52e787..0f35ffa3 100644
--- a/go.sum
+++ b/go.sum
@@ -7,6 +7,8 @@ github.com/CloudyKit/jet v2.1.3-0.20180809161101-62edd43e4f88+incompatible/go.mo
github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY=
github.com/Joker/jade v1.0.1-0.20190614124447-d475f43051e7/go.mod h1:6E6s8o2AE4KhCrqr6GRJjdC/gNfTdxkIXvuGZZda2VM=
github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0=
+github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
+github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/alecthomas/kingpin v2.2.6+incompatible h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=
github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
@@ -14,6 +16,7 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafo
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 h1:AUNCr9CiJuwrRYS3XieqF+Z9B9gNxo/eANAJCF2eiN4=
github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
@@ -48,6 +51,7 @@ github.com/derision-test/go-mockgen v1.1.2/go.mod h1:9H3VGTWYnL1VJoHHCuPKDpPFmNQ
github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
+github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/efritz/pentimento v0.0.0-20190429011147-ade47d831101 h1:RylpU+KNJJNEJIk3o8gZ70uPTlutxaYnikKNPko39LA=
github.com/efritz/pentimento v0.0.0-20190429011147-ade47d831101/go.mod h1:5ALWO82UZwfAtNRUtwzsWimcrcuYzyieTyyXOXrP6EQ=
diff --git a/internal/indexer/documentation.go b/internal/indexer/documentation.go
index 71a839ef..c0c72355 100644
--- a/internal/indexer/documentation.go
+++ b/internal/indexer/documentation.go
@@ -37,9 +37,9 @@ import (
//
// A mapping of types -> documentationResult vertex ID
-type emittedDocumentationResults map[types.Object]uint64
+type emittedDocumentationResults map[ObjectLike]uint64
-func (e emittedDocumentationResults) addAll(other emittedDocumentationResults) map[types.Object]uint64 {
+func (e emittedDocumentationResults) addAll(other emittedDocumentationResults) map[ObjectLike]uint64 {
for associatedType, documentationResultID := range other {
e[associatedType] = documentationResultID
}
@@ -580,7 +580,7 @@ type constVarDocs struct {
docsMarkdown string
// The definition object.
- def types.Object
+ def ObjectLike
// Associated tags
tags []protocol.Tag
@@ -684,7 +684,7 @@ type typeDocs struct {
typ types.Type
// The definition object.
- def types.Object
+ def ObjectLike
// Associated tags
tags []protocol.Tag
@@ -777,7 +777,7 @@ type funcDocs struct {
resultTypes []ast.Expr
// The definition object.
- def types.Object
+ def ObjectLike
// Associated tags
tags []protocol.Tag
diff --git a/internal/indexer/helpers_test.go b/internal/indexer/helpers_test.go
index 0ffee697..ca55a3dc 100644
--- a/internal/indexer/helpers_test.go
+++ b/internal/indexer/helpers_test.go
@@ -29,17 +29,24 @@ func getRepositoryRoot(t *testing.T) string {
return root
}
+var getTestPackagesOnce sync.Once
+var cachedTestPackages []*packages.Package
+
// getTestPackages loads the testdata package (and subpackages).
func getTestPackages(t *testing.T) []*packages.Package {
- packages, err := packages.Load(
- &packages.Config{Mode: loadMode, Dir: getRepositoryRoot(t)},
- "./...",
- )
- if err != nil {
- t.Fatalf("unexpected error loading packages: %s", err)
- }
+ getTestPackagesOnce.Do(func() {
+ var err error
+
+ cachedTestPackages, err = packages.Load(
+ &packages.Config{Mode: loadMode, Dir: getRepositoryRoot(t)},
+ "./...",
+ )
+ if err != nil {
+ t.Fatalf("unexpected error loading packages: %s", err)
+ }
+ })
- return packages
+ return cachedTestPackages
}
// findDefinitionByName looks for a definition with the given name in the given packages. Returns
@@ -132,11 +139,31 @@ func findIndent(s string) (emptyLines int, indent int) {
type capturingWriter struct {
m sync.Mutex
elements []interface{}
+
+ // Quicker access for special types of nodes.
+ // Could add other node types if desired.
+ ranges map[uint64]protocol.Range
+ documents map[uint64]protocol.Document
+ contains map[uint64]uint64
}
func (w *capturingWriter) Write(v interface{}) {
w.m.Lock()
w.elements = append(w.elements, v)
+
+ // Store special elements for quicker access
+ switch elem := v.(type) {
+ case protocol.Range:
+ w.ranges[elem.ID] = elem
+ case protocol.Document:
+ w.documents[elem.ID] = elem
+ case protocol.Contains:
+ // A range is always only contained by one document.
+ for _, inV := range elem.InVs {
+ w.contains[inV] = elem.OutV
+ }
+ }
+
w.m.Unlock()
}
@@ -145,36 +172,29 @@ func (w *capturingWriter) Flush() error {
}
// findDocumentURIByDocumentID returns the URI of the document with the given ID.
-func findDocumentURIByDocumentID(elements []interface{}, id uint64) string {
- for _, elem := range elements {
- switch v := elem.(type) {
- case protocol.Document:
- if v.ID == id {
- return v.URI
- }
- }
+func findDocumentURIByDocumentID(w *capturingWriter, id uint64) string {
+ document, ok := w.documents[id]
+ if !ok {
+ return ""
}
- return ""
+ return document.URI
}
// findRangeByID returns the range with the given identifier.
-func findRangeByID(elements []interface{}, id uint64) (protocol.Range, bool) {
- for _, elem := range elements {
- switch v := elem.(type) {
- case protocol.Range:
- if v.ID == id {
- return v, true
- }
- }
+func findRangeByID(w *capturingWriter, id uint64) (protocol.Range, bool) {
+ r, ok := w.ranges[id]
+
+ if !ok {
+ return protocol.Range{}, false
}
- return protocol.Range{}, false
+ return r, true
}
// findHoverResultByID returns the hover result object with the given identifier.
-func findHoverResultByID(elements []interface{}, id uint64) (protocol.HoverResult, bool) {
- for _, elem := range elements {
+func findHoverResultByID(w *capturingWriter, id uint64) (protocol.HoverResult, bool) {
+ for _, elem := range w.elements {
switch v := elem.(type) {
case protocol.HoverResult:
if v.ID == id {
@@ -187,8 +207,8 @@ func findHoverResultByID(elements []interface{}, id uint64) (protocol.HoverResul
}
// findMonikerByID returns the moniker with the given identifier.
-func findMonikerByID(elements []interface{}, id uint64) (protocol.Moniker, bool) {
- for _, elem := range elements {
+func findMonikerByID(w *capturingWriter, id uint64) (protocol.Moniker, bool) {
+ for _, elem := range w.elements {
switch v := elem.(type) {
case protocol.Moniker:
if v.ID == id {
@@ -201,8 +221,8 @@ func findMonikerByID(elements []interface{}, id uint64) (protocol.Moniker, bool)
}
// findPackageInformationByID returns the moniker with the given identifier.
-func findPackageInformationByID(elements []interface{}, id uint64) (protocol.PackageInformation, bool) {
- for _, elem := range elements {
+func findPackageInformationByID(w *capturingWriter, id uint64) (protocol.PackageInformation, bool) {
+ for _, elem := range w.elements {
switch v := elem.(type) {
case protocol.PackageInformation:
if v.ID == id {
@@ -216,13 +236,15 @@ func findPackageInformationByID(elements []interface{}, id uint64) (protocol.Pac
// findDefintionRangesByDefinitionResultID returns the ranges attached to the definition result with the given
// identifier.
-func findDefintionRangesByDefinitionResultID(elements []interface{}, id uint64) (ranges []protocol.Range) {
+func findDefintionRangesByDefinitionResultID(w *capturingWriter, id uint64) (ranges []protocol.Range) {
+ elements := w.elements
+
for _, elem := range elements {
switch e := elem.(type) {
case protocol.Item:
if e.OutV == id {
for _, inV := range e.InVs {
- if r, ok := findRangeByID(elements, inV); ok {
+ if r, ok := findRangeByID(w, inV); ok {
ranges = append(ranges, r)
}
}
@@ -235,13 +257,15 @@ func findDefintionRangesByDefinitionResultID(elements []interface{}, id uint64)
// findReferenceRangesByReferenceResultID returns the ranges attached to the reference result with the given
// identifier.
-func findReferenceRangesByReferenceResultID(elements []interface{}, id uint64) (ranges []protocol.Range) {
+func findReferenceRangesByReferenceResultID(w *capturingWriter, id uint64) (ranges []protocol.Range) {
+ elements := w.elements
+
for _, elem := range elements {
switch e := elem.(type) {
case protocol.Item:
if e.OutV == id {
for _, inV := range e.InVs {
- if r, ok := findRangeByID(elements, inV); ok {
+ if r, ok := findRangeByID(w, inV); ok {
ranges = append(ranges, r)
}
}
@@ -253,28 +277,22 @@ func findReferenceRangesByReferenceResultID(elements []interface{}, id uint64) (
}
// findDocumentURIContaining finds the URI of the document containing the given ID.
-func findDocumentURIContaining(elements []interface{}, id uint64) string {
- for _, elem := range elements {
- switch e := elem.(type) {
- case protocol.Contains:
- for _, inV := range e.InVs {
- if inV == id {
- return findDocumentURIByDocumentID(elements, e.OutV)
- }
- }
- }
+func findDocumentURIContaining(w *capturingWriter, id uint64) string {
+ documentID, ok := w.contains[id]
+ if !ok {
+ return ""
}
- return ""
+ return findDocumentURIByDocumentID(w, documentID)
}
// findRange returns the range in the given file with the given start line and character.
-func findRange(elements []interface{}, filename string, startLine, startCharacter int) (protocol.Range, bool) {
- for _, elem := range elements {
+func findRange(w *capturingWriter, filename string, startLine, startCharacter int) (protocol.Range, bool) {
+ for _, elem := range w.elements {
switch v := elem.(type) {
case protocol.Range:
if v.Start.Line == startLine && v.Start.Character == startCharacter {
- if findDocumentURIContaining(elements, v.ID) == filename {
+ if findDocumentURIContaining(w, v.ID) == filename {
return v, true
}
}
@@ -284,25 +302,43 @@ func findRange(elements []interface{}, filename string, startLine, startCharacte
return protocol.Range{}, false
}
+// findAllRanges returns a list of ranges in the given file with the given start line and character.
+// This can be used to confirm that there is only one range that would match at a particular location
+func findAllRanges(w *capturingWriter, filename string, startLine, startCharacter int) []protocol.Range {
+ ranges := []protocol.Range{}
+ for _, elem := range w.elements {
+ switch v := elem.(type) {
+ case protocol.Range:
+ if v.Start.Line == startLine && v.Start.Character == startCharacter {
+ if findDocumentURIContaining(w, v.ID) == filename {
+ ranges = append(ranges, v)
+ }
+ }
+ }
+ }
+
+ return ranges
+}
+
// findHoverResultByRangeOrResultSetID returns the hover result attached to the range or result
// set with the given identifier.
-func findHoverResultByRangeOrResultSetID(elements []interface{}, id uint64) (protocol.HoverResult, bool) {
+func findHoverResultByRangeOrResultSetID(w *capturingWriter, id uint64) (protocol.HoverResult, bool) {
// First see if we're attached to a hover result directly
- for _, elem := range elements {
+ for _, elem := range w.elements {
switch e := elem.(type) {
case protocol.TextDocumentHover:
if e.OutV == id {
- return findHoverResultByID(elements, e.InV)
+ return findHoverResultByID(w, e.InV)
}
}
}
// Try to get the hover result of the result set attached to the given range or result set
- for _, elem := range elements {
+ for _, elem := range w.elements {
switch e := elem.(type) {
case protocol.Next:
if e.OutV == id {
- if result, ok := findHoverResultByRangeOrResultSetID(elements, e.InV); ok {
+ if result, ok := findHoverResultByRangeOrResultSetID(w, e.InV); ok {
return result, true
}
}
@@ -314,13 +350,15 @@ func findHoverResultByRangeOrResultSetID(elements []interface{}, id uint64) (pro
// findDefinitionRangesByRangeOrResultSetID returns the definition ranges attached to the range or result set
// with the given identifier.
-func findDefinitionRangesByRangeOrResultSetID(elements []interface{}, id uint64) (ranges []protocol.Range) {
+func findDefinitionRangesByRangeOrResultSetID(w *capturingWriter, id uint64) (ranges []protocol.Range) {
+ elements := w.elements
+
// First see if we're attached to definition result directly
for _, elem := range elements {
switch e := elem.(type) {
case protocol.TextDocumentDefinition:
if e.OutV == id {
- ranges = append(ranges, findDefintionRangesByDefinitionResultID(elements, e.InV)...)
+ ranges = append(ranges, findDefintionRangesByDefinitionResultID(w, e.InV)...)
}
}
}
@@ -330,7 +368,7 @@ func findDefinitionRangesByRangeOrResultSetID(elements []interface{}, id uint64)
switch e := elem.(type) {
case protocol.Next:
if e.OutV == id {
- ranges = append(ranges, findDefinitionRangesByRangeOrResultSetID(elements, e.InV)...)
+ ranges = append(ranges, findDefinitionRangesByRangeOrResultSetID(w, e.InV)...)
}
}
}
@@ -340,13 +378,15 @@ func findDefinitionRangesByRangeOrResultSetID(elements []interface{}, id uint64)
// findReferenceRangesByRangeOrResultSetID returns the reference ranges attached to the range or result set with
// the given identifier.
-func findReferenceRangesByRangeOrResultSetID(elements []interface{}, id uint64) (ranges []protocol.Range) {
+func findReferenceRangesByRangeOrResultSetID(w *capturingWriter, id uint64) (ranges []protocol.Range) {
+ elements := w.elements
+
// First see if we're attached to reference result directly
for _, elem := range elements {
switch e := elem.(type) {
case protocol.TextDocumentReferences:
if e.OutV == id {
- ranges = append(ranges, findReferenceRangesByReferenceResultID(elements, e.InV)...)
+ ranges = append(ranges, findReferenceRangesByReferenceResultID(w, e.InV)...)
}
}
}
@@ -356,7 +396,7 @@ func findReferenceRangesByRangeOrResultSetID(elements []interface{}, id uint64)
switch e := elem.(type) {
case protocol.Next:
if e.OutV == id {
- ranges = append(ranges, findReferenceRangesByRangeOrResultSetID(elements, e.InV)...)
+ ranges = append(ranges, findReferenceRangesByRangeOrResultSetID(w, e.InV)...)
}
}
}
@@ -366,12 +406,12 @@ func findReferenceRangesByRangeOrResultSetID(elements []interface{}, id uint64)
// findMonikersByRangeOrReferenceResultID returns the monikers attached to the range or reference result
// with the given identifier.
-func findMonikersByRangeOrReferenceResultID(elements []interface{}, id uint64) (monikers []protocol.Moniker) {
- for _, elem := range elements {
+func findMonikersByRangeOrReferenceResultID(w *capturingWriter, id uint64) (monikers []protocol.Moniker) {
+ for _, elem := range w.elements {
switch e := elem.(type) {
case protocol.MonikerEdge:
if e.OutV == id {
- if m, ok := findMonikerByID(elements, e.InV); ok {
+ if m, ok := findMonikerByID(w, e.InV); ok {
monikers = append(monikers, m)
}
}
@@ -379,11 +419,11 @@ func findMonikersByRangeOrReferenceResultID(elements []interface{}, id uint64) (
}
// Try to get the reference result of a result set attached to the given range or result set
- for _, elem := range elements {
+ for _, elem := range w.elements {
switch e := elem.(type) {
case protocol.Next:
if e.OutV == id {
- monikers = append(monikers, findMonikersByRangeOrReferenceResultID(elements, e.InV)...)
+ monikers = append(monikers, findMonikersByRangeOrReferenceResultID(w, e.InV)...)
}
}
}
@@ -392,23 +432,23 @@ func findMonikersByRangeOrReferenceResultID(elements []interface{}, id uint64) (
}
// findPackageInformationByMonikerID returns the package information vertexes attached to the moniker with the given identifier.
-func findPackageInformationByMonikerID(elements []interface{}, id uint64) (packageInformation []protocol.PackageInformation) {
- for _, elem := range elements {
+func findPackageInformationByMonikerID(w *capturingWriter, id uint64) (packageInformation []protocol.PackageInformation) {
+ for _, elem := range w.elements {
switch e := elem.(type) {
case protocol.PackageInformationEdge:
if e.OutV == id {
- if m, ok := findPackageInformationByID(elements, e.InV); ok {
+ if m, ok := findPackageInformationByID(w, e.InV); ok {
packageInformation = append(packageInformation, m)
}
}
}
}
- for _, elem := range elements {
+ for _, elem := range w.elements {
switch e := elem.(type) {
case protocol.NextMonikerEdge:
if e.OutV == id {
- packageInformation = append(packageInformation, findPackageInformationByMonikerID(elements, e.InV)...)
+ packageInformation = append(packageInformation, findPackageInformationByMonikerID(w, e.InV)...)
}
}
}
@@ -423,3 +463,13 @@ func splitMarkupContent(value string) []string {
func unCodeFence(value string) string {
return strings.Replace(strings.Replace(value, "```go\n", "", -1), "\n```", "", -1)
}
+
+func compareRange(t *testing.T, r protocol.Range, startLine, startCharacter, endLine, endCharacter int) {
+ if r.Start.Line != startLine || r.Start.Character != startCharacter || r.End.Line != endLine || r.End.Character != endCharacter {
+ t.Fatalf(
+ "incorrect range. want=[%d:%d,%d:%d) have=[%d:%d,%d:%d)",
+ startLine, startCharacter, endLine, endCharacter,
+ r.Start.Line, r.Start.Character, r.End.Line, r.End.Character,
+ )
+ }
+}
diff --git a/internal/indexer/hover.go b/internal/indexer/hover.go
index 7cabd108..8bbabc0f 100644
--- a/internal/indexer/hover.go
+++ b/internal/indexer/hover.go
@@ -10,7 +10,7 @@ import (
// findHoverContents returns the hover contents of the given object. This method is not cached
// and should only be called wrapped in a call to makeCachedHoverResult.
-func findHoverContents(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj types.Object) protocol.MarkupContent {
+func findHoverContents(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj ObjectLike) protocol.MarkupContent {
signature, extra := typeString(obj)
docstring := findDocstring(packageDataCache, pkgs, p, obj)
return toMarkupContent(signature, docstring, extra)
@@ -18,7 +18,7 @@ func findHoverContents(packageDataCache *PackageDataCache, pkgs []*packages.Pack
// findExternalHoverContents returns the hover contents of the given object defined in the given
// package. This method is not cached and should only be called wrapped in a call to makeCachedHoverResult.
-func findExternalHoverContents(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj types.Object) protocol.MarkupContent {
+func findExternalHoverContents(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj ObjectLike) protocol.MarkupContent {
signature, extra := typeString(obj)
docstring := findExternalDocstring(packageDataCache, pkgs, p, obj)
return toMarkupContent(signature, docstring, extra)
@@ -27,7 +27,7 @@ func findExternalHoverContents(packageDataCache *PackageDataCache, pkgs []*packa
// makeCachedHoverResult returns a hover result vertex identifier. If hover text for the given
// identifier has not already been emitted, a new vertex is created. Identifiers will share the
// same hover result if they refer to the same identifier in the same target package.
-func (i *Indexer) makeCachedHoverResult(pkg *types.Package, obj types.Object, fn func() protocol.MarkupContent) uint64 {
+func (i *Indexer) makeCachedHoverResult(pkg *types.Package, obj ObjectLike, fn func() protocol.MarkupContent) uint64 {
key := makeCacheKey(pkg, obj)
if key == "" {
// Do not store empty cache keys
@@ -61,7 +61,7 @@ func (i *Indexer) makeCachedHoverResult(pkg *types.Package, obj types.Object, fn
// identifier. Otherwise, the key will be the object identifier if it refers to a package import.
// If the given package is nil and the object is not a package import, the returned cache key is
// the empty string (to force a fresh calculation of each local object's hover text).
-func makeCacheKey(pkg *types.Package, obj types.Object) string {
+func makeCacheKey(pkg *types.Package, obj ObjectLike) string {
if pkg != nil {
return fmt.Sprintf("%s::%d", pkg.Path(), obj.Pos())
}
@@ -75,7 +75,7 @@ func makeCacheKey(pkg *types.Package, obj types.Object) string {
// findDocstring extracts the comments from the given object. It is assumed that this object is
// declared in an index target (otherwise, findExternalDocstring should be called).
-func findDocstring(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj types.Object) string {
+func findDocstring(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj ObjectLike) string {
if obj == nil {
return ""
}
@@ -90,7 +90,7 @@ func findDocstring(packageDataCache *PackageDataCache, pkgs []*packages.Package,
// findExternalDocstring extracts the comments from the given object. It is assumed that this object is
// declared in a dependency.
-func findExternalDocstring(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj types.Object) string {
+func findExternalDocstring(packageDataCache *PackageDataCache, pkgs []*packages.Package, p *packages.Package, obj ObjectLike) string {
if obj == nil {
return ""
}
diff --git a/internal/indexer/indexer.go b/internal/indexer/indexer.go
index 50226a24..e2efb76b 100644
--- a/internal/indexer/indexer.go
+++ b/internal/indexer/indexer.go
@@ -8,9 +8,12 @@ import (
"go/token"
"go/types"
"log"
+ "math"
+ "path"
"strings"
"sync"
+ "github.com/agnivade/levenshtein"
"github.com/pkg/errors"
"github.com/sourcegraph/lsif-go/internal/gomod"
"github.com/sourcegraph/lsif-go/internal/output"
@@ -19,16 +22,23 @@ import (
"golang.org/x/tools/go/packages"
)
+type importMonikerReference struct {
+ monikerID uint64
+ documentID uint64
+ rangeID uint64
+}
+type setVal interface{}
+
type Indexer struct {
- repositoryRoot string // path to repository
- repositoryRemote string // import path inferred by git remote
- projectRoot string // path to package
- toolInfo protocol.ToolInfo // metadata vertex payload
- moduleName string // name of this module
- moduleVersion string // version of this module
+ repositoryRoot string // path to repository
+ repositoryRemote string // import path inferred by git remote
+ projectRoot string // path to package
+ toolInfo protocol.ToolInfo // metadata vertex payload
+ moduleName string // name of this module
+ moduleVersion string // version of this module
dependencies map[string]gomod.GoModule // parsed module data
- emitter *writer.Emitter // LSIF data emitter
- outputOptions output.Options // What to print to stdout/stderr
+ emitter *writer.Emitter // LSIF data emitter
+ outputOptions output.Options // What to print to stdout/stderr
// Definition type cache
consts map[interface{}]*DefinitionInfo // position -> info
@@ -39,18 +49,19 @@ type Indexer struct {
vars map[interface{}]*DefinitionInfo // position -> info
// LSIF data cache
- documents map[string]*DocumentInfo // filename -> info
- ranges map[string]map[int]uint64 // filename -> offset -> rangeID
- defined map[string]map[int]struct{} // set of defined ranges (filename, offset)
- hoverResultCache map[string]uint64 // cache key -> hoverResultID
- importMonikerIDs map[string]uint64 // identifier:packageInformationID -> monikerID
- packageInformationIDs map[string]uint64 // name -> packageInformationID
- packageDataCache *PackageDataCache // hover text and moniker path cache
- packages []*packages.Package // index target packages
- projectID uint64 // project vertex identifier
+ documents map[string]*DocumentInfo // filename -> info
+ ranges map[string]map[int]uint64 // filename -> offset -> rangeID
+ defined map[string]map[int]struct{} // set of defined ranges (filename, offset)
+ hoverResultCache map[string]uint64 // cache key -> hoverResultID
+ importMonikerIDs map[string]uint64 // identifier:packageInformationID -> monikerID
+ importMonikerReferences map[uint64]map[uint64]map[uint64]setVal // monikerKey -> documentID -> Set(rangeID)
+ packageInformationIDs map[string]uint64 // name -> packageInformationID
+ packageDataCache *PackageDataCache // hover text and moniker path cache
+ packages []*packages.Package // index target packages
+ projectID uint64 // project vertex identifier
packagesByFile map[string][]*packages.Package
- emittedDocumentationResults map[types.Object]uint64 // type object -> documentationResult vertex ID
- emittedDocumentationResultsByPackagePath map[string]uint64 // package path -> documentationResult vertex ID
+ emittedDocumentationResults map[ObjectLike]uint64 // type object -> documentationResult vertex ID
+ emittedDocumentationResultsByPackagePath map[string]uint64 // package path -> documentationResult vertex ID
constsMutex sync.Mutex
funcsMutex sync.Mutex
@@ -62,6 +73,8 @@ type Indexer struct {
hoverResultCacheMutex sync.RWMutex
importMonikerIDsMutex sync.RWMutex
packageInformationIDsMutex sync.RWMutex
+
+ importMonikerChannel chan importMonikerReference
}
func New(
@@ -77,29 +90,31 @@ func New(
outputOptions output.Options,
) *Indexer {
return &Indexer{
- repositoryRoot: repositoryRoot,
- repositoryRemote: repositoryRemote,
- projectRoot: projectRoot,
- toolInfo: toolInfo,
- moduleName: moduleName,
- moduleVersion: moduleVersion,
- dependencies: dependencies,
- emitter: writer.NewEmitter(jsonWriter),
- outputOptions: outputOptions,
- consts: map[interface{}]*DefinitionInfo{},
- funcs: map[interface{}]*DefinitionInfo{},
- imports: map[interface{}]*DefinitionInfo{},
- labels: map[interface{}]*DefinitionInfo{},
- types: map[interface{}]*DefinitionInfo{},
- vars: map[interface{}]*DefinitionInfo{},
- documents: map[string]*DocumentInfo{},
- ranges: map[string]map[int]uint64{},
- defined: map[string]map[int]struct{}{},
- hoverResultCache: map[string]uint64{},
- importMonikerIDs: map[string]uint64{},
- packageInformationIDs: map[string]uint64{},
- packageDataCache: packageDataCache,
- stripedMutex: newStripedMutex(),
+ repositoryRoot: repositoryRoot,
+ repositoryRemote: repositoryRemote,
+ projectRoot: projectRoot,
+ toolInfo: toolInfo,
+ moduleName: moduleName,
+ moduleVersion: moduleVersion,
+ dependencies: dependencies,
+ emitter: writer.NewEmitter(jsonWriter),
+ outputOptions: outputOptions,
+ consts: map[interface{}]*DefinitionInfo{},
+ funcs: map[interface{}]*DefinitionInfo{},
+ imports: map[interface{}]*DefinitionInfo{},
+ labels: map[interface{}]*DefinitionInfo{},
+ types: map[interface{}]*DefinitionInfo{},
+ vars: map[interface{}]*DefinitionInfo{},
+ documents: map[string]*DocumentInfo{},
+ ranges: map[string]map[int]uint64{},
+ defined: map[string]map[int]struct{}{},
+ hoverResultCache: map[string]uint64{},
+ importMonikerIDs: map[string]uint64{},
+ importMonikerReferences: map[uint64]map[uint64]map[uint64]setVal{},
+ packageInformationIDs: map[string]uint64{},
+ packageDataCache: packageDataCache,
+ stripedMutex: newStripedMutex(),
+ importMonikerChannel: make(chan importMonikerReference, 512),
}
}
@@ -111,14 +126,26 @@ func (i *Indexer) Index() error {
return errors.Wrap(err, "failed to load packages")
}
+ wg := new(sync.WaitGroup)
+ // Start any channels used to synchronize reference sets
+ i.startImportMonikerReferenceTracker(wg)
+
+ // Begin emitting and indexing package
i.emitMetadataAndProjectVertex()
i.emitDocuments()
- i.addImports()
+ i.emitImports()
+ i.indexPackageDeclarations()
i.indexDocumentation() // must be invoked before indexDefinitions/indexReferences
i.indexDefinitions()
i.indexReferences()
+
+ // Stop any channels used to synchronize reference sets
+ i.stopImportMonikerReferenceTracker(wg)
+
+ // Link sets of items to corresponding ranges and results.
i.linkReferenceResultsToRanges()
- i.emitContains()
+ i.linkImportMonikersToRanges()
+ i.linkContainsToRanges()
if err := i.emitter.Flush(); err != nil {
return errors.Wrap(err, "failed to write index to disk")
@@ -127,8 +154,52 @@ func (i *Indexer) Index() error {
return nil
}
+func (i *Indexer) startImportMonikerReferenceTracker(wg *sync.WaitGroup) {
+ wg.Add(1)
+
+ go func() {
+ contained := struct{}{}
+
+ for nextReference := range i.importMonikerChannel {
+ monikerID := nextReference.monikerID
+ documentID := nextReference.documentID
+ rangeID := nextReference.rangeID
+
+ if monikerID == 0 || documentID == 0 || rangeID == 0 {
+ // TODO: We should add error logging/warning somehow for these to be easily reported back to user,
+ // but I have not had this happen at all in testing.
+ continue
+ }
+
+ monikerMap, ok := i.importMonikerReferences[monikerID]
+ if !ok {
+ monikerMap = map[uint64]map[uint64]setVal{}
+ i.importMonikerReferences[monikerID] = monikerMap
+ }
+
+ documentMap, ok := monikerMap[documentID]
+ if !ok {
+ documentMap = map[uint64]setVal{}
+ monikerMap[documentID] = documentMap
+ }
+
+ documentMap[rangeID] = contained
+ }
+
+ wg.Done()
+ }()
+}
+
+func (i *Indexer) stopImportMonikerReferenceTracker(wg *sync.WaitGroup) {
+ close(i.importMonikerChannel)
+ wg.Wait()
+}
+
var loadMode = packages.NeedDeps | packages.NeedFiles | packages.NeedImports | packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedName
+// cachedPackages makes sure that we only load packages once per execution
+var cachedPackages map[string][]*packages.Package = map[string][]*packages.Package{}
+
// packages populates the packages field containing an AST for each package within the configured
// project root.
//
@@ -149,10 +220,17 @@ func (i *Indexer) loadPackages(deduplicate bool) error {
Logf: i.packagesLoadLogger,
}
- pkgs, err := packages.Load(config, "./...")
- if err != nil {
- errs <- errors.Wrap(err, "packages.Load")
- return
+ // Make sure we only load packages once per execution.
+ pkgs, ok := cachedPackages[i.projectRoot]
+ if !ok {
+ var err error
+ pkgs, err = packages.Load(config, "./...")
+ if err != nil {
+ errs <- errors.Wrap(err, "packages.Load")
+ return
+ }
+
+ cachedPackages[i.projectRoot] = pkgs
}
if deduplicate {
@@ -280,15 +358,13 @@ func (i *Indexer) emitDocument(filename string) {
i.defined[filename] = map[int]struct{}{}
}
-// addImports modifies the definitions map of each file to include entries for import statements so
-// they can be indexed uniformly in subsequent steps.
-func (i *Indexer) addImports() {
- i.visitEachPackage("Adding import definitions", i.addImportsToPackage)
+// emitImports will emit the appropriate import monikers and named definitions for all packages.
+func (i *Indexer) emitImports() {
+ i.visitEachPackage("Emitting import references and definitions", i.emitImportsForPackage)
}
-// addImportsToFile modifies the definitions map of the given file to include entries for import
-// statements so they can be indexed uniformly in subsequent steps.
-func (i *Indexer) addImportsToPackage(p *packages.Package) {
+// emitImportsForPackage will emit the appropriate import monikers and named definitions for a package.
+func (i *Indexer) emitImportsForPackage(p *packages.Package) {
for _, f := range p.Syntax {
for _, spec := range f.Imports {
pkg := p.Imports[strings.Trim(spec.Path.Value, `"`)]
@@ -296,20 +372,75 @@ func (i *Indexer) addImportsToPackage(p *packages.Package) {
continue
}
- name := importSpecName(spec)
- ident := &ast.Ident{NamePos: spec.Pos(), Name: name, Obj: ast.NewObj(ast.Pkg, name)}
- p.TypesInfo.Defs[ident] = types.NewPkgName(spec.Pos(), p.Types, name, pkg.Types)
+ i.emitImportMonikerReference(p, pkg, spec)
+
+ // spec.Name is only non-nil when we have an import of the form:
+ // import f "fmt"
+ //
+			// So, we want to emit a local definition for the `f` token
+ if spec.Name != nil {
+ i.emitImportMonikerNamedDefinition(p, pkg, spec)
+ }
}
}
}
-// importSpecName extracts the name from the given import spec.
-func importSpecName(spec *ast.ImportSpec) string {
- if spec.Name != nil {
- return spec.Name.String()
+// emitImportMonikerReference will emit the associated reference to the import moniker.
+// This will emit the reference in either case:
+//
+// import "fmt"
+// ^^^------ reference github.com/golang/go/std/fmt
+//
+// import f "fmt"
+// ^^^---- reference github.com/golang/go/std/fmt
+//
+// In both cases, this will emit the corresponding import moniker for "fmt". The range covers ImportSpec.Path.
+func (i *Indexer) emitImportMonikerReference(p *packages.Package, pkg *packages.Package, spec *ast.ImportSpec) {
+ pos := spec.Path.Pos()
+ name := spec.Path.Value
+
+ position, document, _ := i.positionAndDocument(p, pos)
+ obj := types.NewPkgName(pos, p.Types, name, pkg.Types)
+
+ rangeID, _ := i.ensureRangeFor(position, obj)
+ if ok := i.emitImportMoniker(rangeID, p, obj, document); !ok {
+ return
+ }
+
+ // TODO(perf): When we have better coverage, it may be possible to skip emitting this.
+ _ = i.emitter.EmitTextDocumentHover(rangeID, i.makeCachedHoverResult(nil, obj, func() protocol.MarkupContent {
+ return findHoverContents(i.packageDataCache, i.packages, p, obj)
+ }))
+
+ document.appendReference(rangeID)
+}
+
+// emitImportMonikerNamedDefinition will emit the local, non-exported definition for the named import.
+// This will emit the definition for:
+//
+// import "fmt"
+//   no local definition
+//
+// import f "fmt"
+// ^----- local definition
+func (i *Indexer) emitImportMonikerNamedDefinition(p *packages.Package, pkg *packages.Package, spec *ast.ImportSpec) {
+ pos := spec.Name.Pos()
+ name := spec.Name.Name
+ ident := spec.Name
+
+ // Don't generate a definition if we import directly into the same namespace (i.e. "." imports)
+ if name == "." {
+ return
}
- return spec.Path.Value
+ position, document, _ := i.positionAndDocument(p, pos)
+ obj := types.NewPkgName(pos, p.Types, name, pkg.Types)
+
+ rangeID, _ := i.ensureRangeFor(position, obj)
+ resultSetID := i.emitter.EmitResultSet()
+ _ = i.emitter.EmitNext(rangeID, resultSetID)
+
+ i.indexDefinitionForRangeAndResult(p, document, obj, rangeID, resultSetID, false, ident)
}
// getAllReferencedPackages returns a slice of packages containing the index target packages
@@ -345,14 +476,19 @@ func (i *Indexer) indexDefinitionsForPackage(p *packages.Package) {
// implicit object for each case clause of a type switch (including default), and they all
// share the same position. This creates a map with one arbitrarily chosen argument for
// each distinct type switch.
- caseClauses := map[token.Pos]types.Object{}
+ caseClauses := map[token.Pos]ObjectLike{}
for node, obj := range p.TypesInfo.Implicits {
if _, ok := node.(*ast.CaseClause); ok {
caseClauses[obj.Pos()] = obj
}
}
- for ident, obj := range p.TypesInfo.Defs {
+ for ident, typeObj := range p.TypesInfo.Defs {
+		// Must cast because otherwise we have errors from being unable to assign
+		// an ObjectLike to a types.Object, due to missing things like `color` and other
+		// private methods.
+ var obj ObjectLike = typeObj
+
typeSwitchHeader := false
if obj == nil {
// The definitions map contains nil objects for symbolic variables t in t := x.(type)
@@ -368,11 +504,18 @@ func (i *Indexer) indexDefinitionsForPackage(p *packages.Package) {
typeSwitchHeader = true
}
- pos, d, ok := i.positionAndDocument(p, obj.Pos())
+ position, document, ok := i.positionAndDocument(p, obj.Pos())
if !ok {
continue
}
- if !i.markRange(pos) {
+
+		// Always skip types.PkgName, because we handle them in emitImports();
+		// we do not want to emit anything new here.
+ if _, isPkgName := typeObj.(*types.PkgName); isPkgName {
+ continue
+ }
+
+ if !i.markRange(position) {
// This performs a quick assignment to a map that will ensure that
// we don't race against another routine indexing the same definition
// reachable from another dataflow path through the indexer. If we
@@ -380,16 +523,51 @@ func (i *Indexer) indexDefinitionsForPackage(p *packages.Package) {
continue
}
- rangeID := i.indexDefinition(p, pos.Filename, d, pos, obj, typeSwitchHeader, ident)
+ if typVar, ok := typeObj.(*types.Var); ok {
+ if typVar.IsField() && typVar.Anonymous() {
+ i.indexDefinitionForAnonymousField(p, document, ident, typVar, position)
+ continue
+ }
+ }
- i.stripedMutex.LockKey(pos.Filename)
- i.ranges[pos.Filename][pos.Offset] = rangeID
- i.stripedMutex.UnlockKey(pos.Filename)
+ i.indexDefinition(p, document, position, obj, typeSwitchHeader, ident)
+ }
+}
- d.m.Lock()
- d.DefinitionRangeIDs = append(d.DefinitionRangeIDs, rangeID)
- d.m.Unlock()
+// indexDefinitionForAnonymousField will handle anonymous fields definitions.
+//
+// The reason they have to be handled separately is because they are _both_ a:
+// - Definition
+// - Reference
+//
+// See docs/structs.md for more information.
+func (i *Indexer) indexDefinitionForAnonymousField(p *packages.Package, document *DocumentInfo, ident *ast.Ident, typVar *types.Var, position token.Position) {
+ // NOTE: Subtract 1 because we are switching indexing strategy (1-based -> 0-based)
+ startCol := position.Column - 1
+
+ // To find the end of the identifier, we use the identifier End() Pos and not the length
+ // of the name, because there may be package names prefixing the name ("http.Client").
+ endCol := p.Fset.Position(ident.End()).Column - 1
+
+ var rangeID uint64
+ if endCol-startCol == len(typVar.Name()) {
+ rangeID, _ = i.ensureRangeFor(position, typVar)
+ } else {
+ // This will be a separate range that encompasses _two_ items. So it is kind of
+ // "floating" in the nothingness, and should not be looked up in the future when
+ // trying to create a new range for whatever occurs at the start position of this location.
+ //
+ // In other words, this skips setting `i.ranges` for this range.
+ //
+ // Note to future readers: Do not use EmitRange directly unless you know why you don't want i.ensureRangeFor
+ rangeID = i.emitter.EmitRange(
+ protocol.Pos{Line: position.Line - 1, Character: startCol},
+ protocol.Pos{Line: position.Line - 1, Character: endCol},
+ )
}
+
+ resultSetID := i.emitter.EmitResultSet()
+ i.indexDefinitionForRangeAndResult(p, document, typVar, rangeID, resultSetID, false, ident)
}
// positionAndDocument returns the position of the given object and the document info object
@@ -431,12 +609,9 @@ func (i *Indexer) markRange(pos token.Position) bool {
return true
}
-// indexDefinition emits data for the given definition object.
-func (i *Indexer) indexDefinition(p *packages.Package, filename string, document *DocumentInfo, pos token.Position, obj types.Object, typeSwitchHeader bool, ident *ast.Ident) uint64 {
- // Ensure the range exists, but don't emit a new one as it might already exist due to another
- // phase of indexing (such as symbols) having emitted the range.
- rangeID, _ := i.ensureRangeFor(pos, obj)
- resultSetID := i.emitter.EmitResultSet()
+// indexDefinitionForRangeAndResult will handle all Indexer related handling of
+// a definition for a given rangeID and resultSetID.
+func (i *Indexer) indexDefinitionForRangeAndResult(p *packages.Package, document *DocumentInfo, obj ObjectLike, rangeID, resultSetID uint64, typeSwitchHeader bool, ident *ast.Ident) *DefinitionInfo {
defResultID := i.emitter.EmitDefinitionResult()
_ = i.emitter.EmitNext(rangeID, resultSetID)
@@ -456,9 +631,7 @@ func (i *Indexer) indexDefinition(p *packages.Package, filename string, document
}))
}
- if _, ok := obj.(*types.PkgName); ok {
- i.emitImportMoniker(resultSetID, p, obj)
- }
+	// NOTE: Import monikers are emitted by emitImports; they do not need to be emitted here.
if obj.Exported() {
i.emitExportMoniker(resultSetID, p, obj)
@@ -474,22 +647,35 @@ func (i *Indexer) indexDefinition(p *packages.Package, filename string, document
_ = i.emitter.EmitDocumentationResultEdge(documentationResultID, resultSetID)
}
- i.setDefinitionInfo(obj, ident, &DefinitionInfo{
+ definitionInfo := &DefinitionInfo{
DocumentID: document.DocumentID,
RangeID: rangeID,
ResultSetID: resultSetID,
DefinitionResultID: defResultID,
ReferenceRangeIDs: map[uint64][]uint64{},
TypeSwitchHeader: typeSwitchHeader,
- })
+ }
+ i.setDefinitionInfo(obj, ident, definitionInfo)
+
+ document.appendDefinition(rangeID)
+
+ return definitionInfo
+}
- return rangeID
+// indexDefinition emits data for the given definition object.
+func (i *Indexer) indexDefinition(p *packages.Package, document *DocumentInfo, position token.Position, obj ObjectLike, typeSwitchHeader bool, ident *ast.Ident) *DefinitionInfo {
+ // Ensure the range exists, but don't emit a new one as it might already exist due to another
+ // phase of indexing (such as symbols) having emitted the range.
+ rangeID, _ := i.ensureRangeFor(position, obj)
+ resultSetID := i.emitter.EmitResultSet()
+
+ return i.indexDefinitionForRangeAndResult(p, document, obj, rangeID, resultSetID, typeSwitchHeader, ident)
}
// setDefinitionInfo stashes the given definition info indexed by the given object type and name.
// This definition info will be accessible by invoking getDefinitionInfo with the same type and
// name values (but not necessarily the same object).
-func (i *Indexer) setDefinitionInfo(obj types.Object, ident *ast.Ident, d *DefinitionInfo) {
+func (i *Indexer) setDefinitionInfo(obj ObjectLike, ident *ast.Ident, d *DefinitionInfo) {
switch v := obj.(type) {
case *types.Const:
i.constsMutex.Lock()
@@ -522,6 +708,11 @@ func (i *Indexer) setDefinitionInfo(obj types.Object, ident *ast.Ident, d *Defin
i.varsMutex.Lock()
i.vars[obj.Pos()] = d
i.varsMutex.Unlock()
+
+ case *PkgDeclaration:
+ // Do nothing -- we don't need to reference these ever again.
+ break
+
}
}
@@ -540,35 +731,39 @@ func (i *Indexer) indexReferencesForPackage(p *packages.Package) {
continue
}
- pos, d, ok := i.positionAndDocument(p, ident.Pos())
+ pos, document, ok := i.positionAndDocument(p, ident.Pos())
if !ok {
continue
}
- rangeID, ok := i.indexReference(p, d, pos, definitionObj, ident)
+ rangeID, ok := i.indexReference(p, document, pos, definitionObj, ident)
if !ok {
continue
}
- d.m.Lock()
- d.ReferenceRangeIDs = append(d.ReferenceRangeIDs, rangeID)
- d.m.Unlock()
+ document.appendReference(rangeID)
}
}
// indexReference emits data for the given reference object.
-func (i *Indexer) indexReference(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj types.Object, ident *ast.Ident) (uint64, bool) {
- if def := i.getDefinitionInfo(definitionObj, ident); def != nil {
- return i.indexReferenceToDefinition(p, document, pos, definitionObj, def)
- }
+func (i *Indexer) indexReference(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj ObjectLike, ident *ast.Ident) (uint64, bool) {
+ return i.indexReferenceWithDefinitionInfo(p, document, pos, definitionObj, ident, i.getDefinitionInfo(definitionObj, ident))
+}
- return i.indexReferenceToExternalDefinition(p, document, pos, definitionObj)
+// indexReferenceWithDefinitionInfo emits data for the given reference object and definition info.
+// This can be used when the DefinitionInfo is already known, which will skip needing to get and release locks.
+func (i *Indexer) indexReferenceWithDefinitionInfo(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj ObjectLike, ident *ast.Ident, definitionInfo *DefinitionInfo) (uint64, bool) {
+ if definitionInfo != nil {
+ return i.indexReferenceToDefinition(p, document, pos, definitionObj, definitionInfo)
+ } else {
+ return i.indexReferenceToExternalDefinition(p, document, pos, definitionObj)
+ }
}
// getDefinitionInfo returns the definition info object for the given object. This requires that
// setDefinitionInfo was previously called an object that can be resolved in the same way. This
// will only return definitions which are defined in an index target (not a dependency).
-func (i *Indexer) getDefinitionInfo(obj types.Object, ident *ast.Ident) *DefinitionInfo {
+func (i *Indexer) getDefinitionInfo(obj ObjectLike, ident *ast.Ident) *DefinitionInfo {
switch v := obj.(type) {
case *types.Const:
return i.consts[v.Pos()]
@@ -582,6 +777,9 @@ func (i *Indexer) getDefinitionInfo(obj types.Object, ident *ast.Ident) *Definit
return i.types[ident.String()+"="+obj.Type().String()]
case *types.Var:
return i.vars[v.Pos()]
+ case *PkgDeclaration:
+ // We don't store definition info for PkgDeclaration.
+ // They are never referenced after the first iteration.
}
return nil
@@ -589,7 +787,7 @@ func (i *Indexer) getDefinitionInfo(obj types.Object, ident *ast.Ident) *Definit
// indexReferenceToDefinition emits data for the given reference object that is defined within
// an index target package.
-func (i *Indexer) indexReferenceToDefinition(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj types.Object, d *DefinitionInfo) (uint64, bool) {
+func (i *Indexer) indexReferenceToDefinition(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj ObjectLike, d *DefinitionInfo) (uint64, bool) {
rangeID, ok := i.ensureRangeFor(pos, definitionObj)
if !ok {
// Not a new range result; this occurs when the definition and reference
@@ -612,7 +810,7 @@ func (i *Indexer) indexReferenceToDefinition(p *packages.Package, document *Docu
d.m.Unlock()
if d.TypeSwitchHeader {
- // Attache a hover text result _directly_ to the given range so that it "overwrites" the
+ // Attach a hover text result _directly_ to the given range so that it "overwrites" the
// hover result of the type switch header for this use. Each reference of such a variable
// will need a more specific hover text, as the type of the variable is refined in the body
// of case clauses of the type switch.
@@ -627,7 +825,7 @@ func (i *Indexer) indexReferenceToDefinition(p *packages.Package, document *Docu
// indexReferenceToExternalDefinition emits data for the given reference object that is not defined
// within an index target package. This definition _may_ be resolvable by scanning dependencies, but
// it is not guaranteed.
-func (i *Indexer) indexReferenceToExternalDefinition(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj types.Object) (uint64, bool) {
+func (i *Indexer) indexReferenceToExternalDefinition(p *packages.Package, document *DocumentInfo, pos token.Position, definitionObj ObjectLike) (uint64, bool) {
definitionPkg := definitionObj.Pkg()
if definitionPkg == nil {
return 0, false
@@ -642,21 +840,27 @@ func (i *Indexer) indexReferenceToExternalDefinition(p *packages.Package, docume
})
rangeID, _ := i.ensureRangeFor(pos, definitionObj)
- refResultID := i.emitter.EmitReferenceResult()
- _ = i.emitter.EmitTextDocumentReferences(rangeID, refResultID)
- _ = i.emitter.EmitItemOfReferences(refResultID, []uint64{rangeID}, document.DocumentID)
-
if hoverResultID != 0 {
_ = i.emitter.EmitTextDocumentHover(rangeID, hoverResultID)
}
- i.emitImportMoniker(rangeID, p, definitionObj)
+ // Only emit an import moniker which will link to the external definition. If we actually
+	// put a textDocument/references result here, we would not traverse to look up the external definition
+ // via the moniker.
+ if ok := i.emitImportMoniker(rangeID, p, definitionObj, document); !ok {
+ return 0, false
+ }
+
return rangeID, true
}
+func (i *Indexer) addImportMonikerReference(monikerID, rangeID, documentID uint64) {
+ i.importMonikerChannel <- importMonikerReference{monikerID, documentID, rangeID}
+}
+
// ensureRangeFor returns a range identifier for the given object. If a range for the object has
// not been emitted, a new vertex is created.
-func (i *Indexer) ensureRangeFor(pos token.Position, obj types.Object) (uint64, bool) {
+func (i *Indexer) ensureRangeFor(pos token.Position, obj ObjectLike) (uint64, bool) {
i.stripedMutex.RLockKey(pos.Filename)
rangeID, ok := i.ranges[pos.Filename][pos.Offset]
i.stripedMutex.RUnlockKey(pos.Filename)
@@ -696,23 +900,48 @@ func (i *Indexer) linkItemsToDefinitions(d *DefinitionInfo) {
}
}
-// emitContains emits the contains relationship for all documents and the ranges that it contains.
-func (i *Indexer) emitContains() {
- i.visitEachDocument("Emitting contains relations", i.emitContainsForDocument)
+func (i *Indexer) linkImportMonikersToRanges() {
+ for monikerID, documentReferences := range i.importMonikerReferences {
+ // emit one result set and reference result per monikerID
+ resultSetID := i.emitter.EmitResultSet()
+ referenceResultID := i.emitter.EmitReferenceResult()
+
+ // Link the result set to the correct moniker
+ _ = i.emitter.EmitMonikerEdge(resultSetID, monikerID)
+
+ // Link the ranges correctly to the result
+ for documentID, rangeSet := range documentReferences {
+ rangeIDs := make([]uint64, 0, len(rangeSet))
+ for rangeID := range rangeSet {
+ rangeIDs = append(rangeIDs, rangeID)
+
+ _ = i.emitter.EmitNext(rangeID, resultSetID)
+ }
+
+ _ = i.emitter.EmitTextDocumentReferences(resultSetID, referenceResultID)
+ _ = i.emitter.EmitItemOfReferences(referenceResultID, rangeIDs, documentID)
+ }
+
+ }
+}
+
+// linkContainsToRanges emits the contains relationship for all documents and the ranges that it contains.
+func (i *Indexer) linkContainsToRanges() {
+ i.visitEachDocument("Emitting contains relations", i.linkContainsForDocument)
// TODO(efritz) - think about printing a title here
- i.emitContainsForProject()
+ i.linkContainsForProject()
}
-// emitContainsForProject emits a contains edge between a document and its ranges.
-func (i *Indexer) emitContainsForDocument(d *DocumentInfo) {
+// linkContainsForDocument emits a contains edge between a document and its ranges.
+func (i *Indexer) linkContainsForDocument(d *DocumentInfo) {
if len(d.DefinitionRangeIDs) > 0 || len(d.ReferenceRangeIDs) > 0 {
_ = i.emitter.EmitContains(d.DocumentID, union(d.DefinitionRangeIDs, d.ReferenceRangeIDs))
}
}
-// emitContainsForProject emits a contains edge between the target project and all indexed documents.
-func (i *Indexer) emitContainsForProject() {
+// linkContainsForProject emits a contains edge between the target project and all indexed documents.
+func (i *Indexer) linkContainsForProject() {
documentIDs := make([]uint64, 0, len(i.documents))
for _, info := range i.documents {
documentIDs = append(documentIDs, info.DocumentID)
@@ -723,6 +952,89 @@ func (i *Indexer) emitContainsForProject() {
}
}
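+// indexPackageDeclarations visits every package and emits a definition for the best
+// `package <name>` clause plus references for the clauses in the remaining files.
+// See docs/package_declarations.md.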
+func (i *Indexer) indexPackageDeclarations() {
+ i.visitEachPackage("Indexing package declarations", i.indexPackageDeclarationForPackage)
+}
+
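+// DeclInfo describes one file's package clause: whether the file carries a package
+// docstring and the path of that file.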
+type DeclInfo struct {
+ HasDoc bool
+ Path string
+}
+
+// indexPackageDeclarationForPackage picks the filename that is the most idiomatic home for the
+// package's definition, so jump-to-definition lands in a better Go file than, say, $PKG_test.go.
+func (i *Indexer) indexPackageDeclarationForPackage(p *packages.Package) {
+ packageDeclarations := make([]DeclInfo, 0, len(p.Syntax))
+ for _, f := range p.Syntax {
+ _, position := newPkgDeclaration(p, f)
+ packageDeclarations = append(packageDeclarations, DeclInfo{
+ HasDoc: f.Doc != nil,
+ Path: position.Filename,
+ })
+ }
+
+ bestFilename, err := findBestPackageDefinitionPath(p.Name, packageDeclarations)
+ if err != nil {
+ return
+ }
+
+ // First, index the definition, using the package clause in the best file.
+ var definitionInfo *DefinitionInfo
+ for _, f := range p.Syntax {
+ obj, position := newPkgDeclaration(p, f)
+
+ // Skip everything that isn't the best
+ if position.Filename != bestFilename {
+ continue
+ }
+
+ name := obj.Name()
+ _, d, ok := i.positionAndDocument(p, obj.Pos())
+ if !ok {
+ return
+ }
+
+ definitionInfo = i.indexDefinition(p, d, position, obj, false, &ast.Ident{
+ NamePos: obj.Pos(),
+ Name: name,
+ Obj: nil,
+ })
+
+ // Once we've indexed the best one, we can quit this loop
+ break
+ }
+
+ // Then, index the rest of the files, which are references to that package info.
+ for _, f := range p.Syntax {
+ obj, position := newPkgDeclaration(p, f)
+
+ // Skip the definition; it is already indexed.
+ if position.Filename == bestFilename {
+ continue
+ }
+
+ name := obj.Name()
+
+ _, document, ok := i.positionAndDocument(p, obj.Pos())
+ if !ok {
+ continue
+ }
+ ident := &ast.Ident{
+ NamePos: obj.Pos(),
+ Name: name,
+ Obj: nil,
+ }
+ rangeID, ok := i.indexReferenceWithDefinitionInfo(p, document, position, obj, ident, definitionInfo)
+
+ if !ok {
+ continue
+ }
+
+ i.setRangeForPosition(position, rangeID)
+ document.appendReference(rangeID)
+ }
+}
+
// Stats returns an IndexerStats object with the number of packages, files, and elements analyzed/emitted.
func (i *Indexer) Stats() IndexerStats {
return IndexerStats{
@@ -732,3 +1044,87 @@ func (i *Indexer) Stats() IndexerStats {
NumElements: i.emitter.NumElements(),
}
}
+
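+// setRangeForPosition records the emitted range ID for the given position in i.ranges
+// (under the striped mutex) so that later lookups, such as ensureRangeFor, can reuse it.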
+func (i *Indexer) setRangeForPosition(position token.Position, id uint64) {
+ i.stripedMutex.LockKey(position.Filename)
+ i.ranges[position.Filename][position.Offset] = id
+ i.stripedMutex.UnlockKey(position.Filename)
+}
+
+// findBestPackageDefinitionPath searches possiblePaths and returns the path that seems best:
+// one with package documentation if possible, otherwise the one whose name is most similar to
+// the package name.
+func findBestPackageDefinitionPath(packageName string, possiblePaths []DeclInfo) (string, error) {
+ if len(possiblePaths) == 0 {
+ return "", errors.New("must have at least one possible path")
+ }
+
+ pathsWithDocs := []DeclInfo{}
+ for _, v := range possiblePaths {
+ if v.HasDoc {
+ pathsWithDocs = append(pathsWithDocs, v)
+ }
+ }
+
+ // Idiomatically, only one .go file per package carries the package docstring, so this
+ // branch handles the common case.
+ if len(pathsWithDocs) == 1 {
+ return pathsWithDocs[0].Path, nil
+ }
+
+ // If, for some reason, more than one .go file has a package docstring, only consider
+ // those paths (instead of all of the possible paths).
+ if len(pathsWithDocs) > 1 {
+ possiblePaths = pathsWithDocs
+ }
+
+ // Prefer non-_test files for non-_test packages, and _test files for _test packages.
+ possiblePaths = filterBasedOnTestFiles(possiblePaths, packageName)
+
+ // Find the best remaining path, in order of preference:
+ //   1. doc.go
+ //   2. an exact match of the package name
+ //   3. the smallest Levenshtein distance to the package name
+ minDistance, bestPath := math.MaxInt32, ""
+ for _, v := range possiblePaths {
+ fileName := fileNameWithoutExtension(v.Path)
+
+ if "doc.go" == path.Base(v.Path) {
+ return v.Path, nil
+ }
+
+ if packageName == fileName {
+ return v.Path, nil
+ }
+
+ distance := levenshtein.ComputeDistance(packageName, fileName)
+ if distance < minDistance {
+ minDistance = distance
+ bestPath = v.Path
+ }
+ }
+
+ return bestPath, nil
+}
+
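+// fileNameWithoutExtension strips the file extension from fileName (any directory prefix is kept).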
+func fileNameWithoutExtension(fileName string) string {
+ return strings.TrimSuffix(fileName, path.Ext(fileName))
+}
+
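+// filterBasedOnTestFiles keeps only _test.go paths when the package name ends in _test (and only
+// non-test paths otherwise), falling back to the full list when the filter would remove everything.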
+func filterBasedOnTestFiles(possiblePaths []DeclInfo, packageName string) []DeclInfo {
+ packageNameEndsWithTest := strings.HasSuffix(packageName, "_test")
+
+ preferredPaths := []DeclInfo{}
+ for _, v := range possiblePaths {
+ if packageNameEndsWithTest == strings.HasSuffix(v.Path, "_test.go") {
+ preferredPaths = append(preferredPaths, v)
+ }
+ }
+
+ if len(preferredPaths) > 0 {
+ return preferredPaths
+ }
+
+ return possiblePaths
+}
diff --git a/internal/indexer/indexer_test.go b/internal/indexer/indexer_test.go
index 3508b7a4..70650636 100644
--- a/internal/indexer/indexer_test.go
+++ b/internal/indexer/indexer_test.go
@@ -11,14 +11,25 @@ import (
"testing"
"github.com/hexops/autogold"
+ "github.com/sourcegraph/lsif-go/internal/gomod"
"github.com/sourcegraph/lsif-go/internal/output"
"github.com/sourcegraph/lsif-static-doc/staticdoc"
"github.com/sourcegraph/sourcegraph/lib/codeintel/lsif/protocol"
"github.com/sourcegraph/sourcegraph/lib/codeintel/lsif/protocol/writer"
)
+var dependencies = map[string]gomod.GoModule{
+ "github.com/sourcegraph/lsif-go": {Name: "github.com/sourcegraph/lsif-go", Version: "dev"},
+ "github.com/golang/go": {Name: "github.com/golang/go", Version: "go1.16"},
+}
+
func TestIndexer(t *testing.T) {
- w := &capturingWriter{}
+ w := &capturingWriter{
+ ranges: map[uint64]protocol.Range{},
+ documents: map[uint64]protocol.Document{},
+ contains: map[uint64]uint64{},
+ }
+
projectRoot := getRepositoryRoot(t)
indexer := New(
"/dev/github.com/sourcegraph/lsif-go/internal/testdata",
@@ -27,7 +38,7 @@ func TestIndexer(t *testing.T) {
protocol.ToolInfo{Name: "lsif-go", Version: "dev"},
"testdata",
"0.0.1",
- nil,
+ dependencies,
w,
NewPackageDataCache(),
output.Options{},
@@ -38,16 +49,16 @@ func TestIndexer(t *testing.T) {
}
t.Run("check Parallel function hover text", func(t *testing.T) {
- r, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "parallel.go"), 13, 5)
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "parallel.go"), 13, 5)
if !ok {
t.Fatalf("could not find target range")
}
- hoverResult, ok := findHoverResultByRangeOrResultSetID(w.elements, r.ID)
+ hoverResult, ok := findHoverResultByRangeOrResultSetID(w, r.ID)
markupContentSegments := splitMarkupContent(hoverResult.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 2 {
- t.Fatalf("could not find hover text")
+ t.Fatalf("could not find hover text: %v", markupContentSegments)
}
expectedType := `func Parallel(ctx Context, fns ...ParallelizableFunc) error`
@@ -64,18 +75,83 @@ func TestIndexer(t *testing.T) {
}
})
- // TODO(efritz) - support "package testdata" identifiers
+ t.Run("declares definitions for 'package testdata' identifiers", func(t *testing.T) {
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "main.go"), 2, 8)
+ if !ok {
+ t.Errorf("Could not find range for 'package testdata'")
+ }
+
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
+ if len(definitions) != 1 {
+ t.Errorf("Definitions: %+v\n", definitions)
+ }
+
+ def := definitions[0]
+ compareRange(t, def, 2, 8, 2, 16)
+
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
+ if len(monikers) != 1 {
+ t.Errorf("Monikers: %+v\n", monikers)
+ }
+
+ moniker := monikers[0]
+ value := moniker.Identifier
+ expectedLabel := "github.com/sourcegraph/lsif-go/internal/testdata"
+ if value != expectedLabel {
+ t.Errorf("incorrect moniker identifier. want=%q have=%q", expectedLabel, value)
+ }
+ })
+
+ t.Run("declares definitions for nested 'package *' identifiers", func(t *testing.T) {
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "internal", "secret", "doc.go"), 1, 8)
+ if !ok {
+ t.Errorf("Could not find range for 'package secret'")
+ }
+
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
+ if len(definitions) != 1 {
+ t.Errorf("Definitions: %+v\n", definitions)
+ }
+
+ def := definitions[0]
+ compareRange(t, def, 1, 8, 1, 14)
+
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
+ if len(monikers) != 1 {
+ t.Errorf("Monikers: %+v\n", monikers)
+ }
+
+ moniker := monikers[0]
+ value := moniker.Identifier
+ expectedLabel := "github.com/sourcegraph/lsif-go/internal/testdata/internal/secret"
+ if value != expectedLabel {
+ t.Errorf("incorrect moniker identifier. want=%q have=%q", expectedLabel, value)
+ }
+ })
t.Run("check external package hover text", func(t *testing.T) {
- r, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "parallel.go"), 4, 2)
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "parallel.go"), 4, 2)
if !ok {
t.Fatalf("could not find target range")
}
- hoverResult, ok := findHoverResultByRangeOrResultSetID(w.elements, r.ID)
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
+ if len(monikers) != 1 {
+ t.Fatalf("found too many monikers: %+v\n", monikers)
+ }
+
+ // Only important part is linking to the correct moniker.
+ // Hover results will be linked accordingly
+ moniker := monikers[0]
+ expectedMoniker := "github.com/golang/go/std/sync"
+ if moniker.Identifier != expectedMoniker {
+ t.Errorf("incorrect moniker identifier. want=%q have=%q", expectedMoniker, moniker.Identifier)
+ }
+
+ hoverResult, ok := findHoverResultByRangeOrResultSetID(w, r.ID)
markupContentSegments := splitMarkupContent(hoverResult.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 2 {
- t.Fatalf("could not find hover text")
+ t.Fatalf("could not find hover text: %v", markupContentSegments)
}
expectedType := `package "sync"`
@@ -95,12 +171,12 @@ func TestIndexer(t *testing.T) {
})
t.Run("check errs definition", func(t *testing.T) {
- r, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "parallel.go"), 21, 3)
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "parallel.go"), 21, 3)
if !ok {
t.Fatalf("could not find target range")
}
- definitions := findDefinitionRangesByRangeOrResultSetID(w.elements, r.ID)
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
if len(definitions) != 1 {
t.Fatalf("incorrect definition count. want=%d have=%d", 1, len(definitions))
}
@@ -109,12 +185,12 @@ func TestIndexer(t *testing.T) {
})
t.Run("check wg references", func(t *testing.T) {
- r, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "parallel.go"), 26, 1)
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "parallel.go"), 26, 1)
if !ok {
t.Fatalf("could not find target range")
}
- references := findReferenceRangesByRangeOrResultSetID(w.elements, r.ID)
+ references := findReferenceRangesByRangeOrResultSetID(w, r.ID)
if len(references) != 4 {
t.Fatalf("incorrect reference count. want=%d have=%d", 4, len(references))
}
@@ -128,12 +204,12 @@ func TestIndexer(t *testing.T) {
})
t.Run("check NestedB monikers", func(t *testing.T) {
- r, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "data.go"), 27, 3)
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "data.go"), 27, 3)
if !ok {
t.Fatalf("could not find target range")
}
- monikers := findMonikersByRangeOrReferenceResultID(w.elements, r.ID)
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
if len(monikers) != 1 {
t.Fatalf("incorrect moniker count. want=%d have=%d", 1, len(monikers))
}
@@ -149,17 +225,17 @@ func TestIndexer(t *testing.T) {
})
t.Run("check typeswitch", func(t *testing.T) {
- definition, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "typeswitch.go"), 3, 8)
+ definition, ok := findRange(w, "file://"+filepath.Join(projectRoot, "typeswitch.go"), 3, 8)
if !ok {
t.Fatalf("could not find target range")
}
- intReference, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "typeswitch.go"), 5, 9)
+ intReference, ok := findRange(w, "file://"+filepath.Join(projectRoot, "typeswitch.go"), 5, 9)
if !ok {
t.Fatalf("could not find target range")
}
- boolReference, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "typeswitch.go"), 7, 10)
+ boolReference, ok := findRange(w, "file://"+filepath.Join(projectRoot, "typeswitch.go"), 7, 10)
if !ok {
t.Fatalf("could not find target range")
}
@@ -167,7 +243,7 @@ func TestIndexer(t *testing.T) {
//
// Check definition links
- definitions := findDefinitionRangesByRangeOrResultSetID(w.elements, intReference.ID)
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, intReference.ID)
if len(definitions) != 1 {
t.Fatalf("incorrect definition count. want=%d have=%d", 1, len(definitions))
}
@@ -176,7 +252,7 @@ func TestIndexer(t *testing.T) {
//
// Check reference links
- references := findReferenceRangesByRangeOrResultSetID(w.elements, definition.ID)
+ references := findReferenceRangesByRangeOrResultSetID(w, definition.ID)
if len(references) != 3 {
t.Fatalf("incorrect reference count. want=%d have=%d", 2, len(references))
}
@@ -191,7 +267,7 @@ func TestIndexer(t *testing.T) {
// TODO(efritz) - update test here if we emit hover text for the header
- intReferenceHoverResult, ok := findHoverResultByRangeOrResultSetID(w.elements, intReference.ID)
+ intReferenceHoverResult, ok := findHoverResultByRangeOrResultSetID(w, intReference.ID)
markupContentSegments := splitMarkupContent(intReferenceHoverResult.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 1 {
t.Fatalf("could not find hover text")
@@ -202,7 +278,7 @@ func TestIndexer(t *testing.T) {
t.Errorf("incorrect hover text type. want=%q have=%q", expectedType, value)
}
- boolReferenceHoverResult, ok := findHoverResultByRangeOrResultSetID(w.elements, boolReference.ID)
+ boolReferenceHoverResult, ok := findHoverResultByRangeOrResultSetID(w, boolReference.ID)
markupContentSegments = splitMarkupContent(boolReferenceHoverResult.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 1 {
t.Fatalf("could not find hover text")
@@ -217,19 +293,19 @@ func TestIndexer(t *testing.T) {
t.Run("check typealias", func(t *testing.T) {
typealiasFile := "file://" + filepath.Join(projectRoot, "typealias.go")
- r, ok := findRange(w.elements, typealiasFile, 7, 5)
+ r, ok := findRange(w, typealiasFile, 7, 5)
if !ok {
t.Fatalf("could not find target range")
}
- definitions := findDefinitionRangesByRangeOrResultSetID(w.elements, r.ID)
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
if len(definitions) != 1 {
t.Fatalf("incorrection definition count. want=%d have=%d", 1, len(definitions))
}
compareRange(t, definitions[0], 7, 5, 7, 17)
- hover, ok := findHoverResultByRangeOrResultSetID(w.elements, r.ID)
+ hover, ok := findHoverResultByRangeOrResultSetID(w, r.ID)
markupContentSegments := splitMarkupContent(hover.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 3 {
t.Fatalf("incorrect hover text count. want=%d have=%d: %v", 3, len(markupContentSegments), markupContentSegments)
@@ -259,12 +335,12 @@ func TestIndexer(t *testing.T) {
t.Run("check typealias reference", func(t *testing.T) {
typealiasFile := "file://" + filepath.Join(projectRoot, "typealias.go")
- r, ok := findRange(w.elements, typealiasFile, 7, 27)
+ r, ok := findRange(w, typealiasFile, 7, 27)
if !ok {
t.Fatalf("could not find target range")
}
- definitions := findDefinitionRangesByRangeOrResultSetID(w.elements, r.ID)
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
if len(definitions) != 1 {
t.Fatalf("incorrection definition count. want=%d have=%d", 1, len(definitions))
}
@@ -276,7 +352,7 @@ func TestIndexer(t *testing.T) {
compareRange(t, definitions[0], 6, 5, 6, 11)
- hover, ok := findHoverResultByRangeOrResultSetID(w.elements, r.ID)
+ hover, ok := findHoverResultByRangeOrResultSetID(w, r.ID)
markupContentSegments := splitMarkupContent(hover.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 3 {
t.Fatalf("incorrect hover text count. want=%d have=%d: %v", 3, len(markupContentSegments), markupContentSegments)
@@ -306,19 +382,19 @@ func TestIndexer(t *testing.T) {
t.Run("check_typealias anonymous struct", func(t *testing.T) {
typealiasFile := "file://" + filepath.Join(projectRoot, "typealias.go")
- r, ok := findRange(w.elements, typealiasFile, 9, 5)
+ r, ok := findRange(w, typealiasFile, 9, 5)
if !ok {
t.Fatalf("could not find target range")
}
- definitions := findDefinitionRangesByRangeOrResultSetID(w.elements, r.ID)
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
if len(definitions) != 1 {
t.Fatalf("incorrection definition count. want=%d have=%d", 1, len(definitions))
}
compareRange(t, definitions[0], 9, 5, 9, 14)
- hover, ok := findHoverResultByRangeOrResultSetID(w.elements, r.ID)
+ hover, ok := findHoverResultByRangeOrResultSetID(w, r.ID)
markupContentSegments := splitMarkupContent(hover.Result.Contents.(protocol.MarkupContent).Value)
if !ok || len(markupContentSegments) < 2 {
t.Fatalf("incorrect hover text count. want=%d have=%d: %v", 2, len(markupContentSegments), markupContentSegments)
@@ -337,25 +413,174 @@ func TestIndexer(t *testing.T) {
t.Errorf("incorrect hover text documentation. want=%q have=%q", expectedUnderlyingType, value)
}
- r, ok = findRange(w.elements, typealiasFile, 9, 17)
+ r, ok = findRange(w, typealiasFile, 9, 17)
if ok {
t.Fatalf("found range for anonymous struct when not expected")
}
})
t.Run("check nested struct definition", func(t *testing.T) {
- r, ok := findRange(w.elements, "file://"+filepath.Join(projectRoot, "composite.go"), 11, 1)
+ ranges := findAllRanges(w, "file://"+filepath.Join(projectRoot, "composite.go"), 11, 1)
+ if len(ranges) != 1 {
+ t.Fatalf("found more than one range for a non-selector nested struct: %v", ranges)
+ }
+
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "composite.go"), 11, 1)
if !ok {
t.Fatalf("could not find target range")
}
- definitions := findDefinitionRangesByRangeOrResultSetID(w.elements, r.ID)
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
if len(definitions) != 2 {
t.Fatalf("incorrect definition count. want=%d have=%d", 2, len(definitions))
}
+ sort.Slice(definitions, func(i, j int) bool {
+ return definitions[i].Start.Line < definitions[j].Start.Line
+ })
+
+ // Original definition
compareRange(t, definitions[0], 4, 5, 4, 10)
+
+ // Definition through the moniker
compareRange(t, definitions[1], 11, 1, 11, 6)
+
+ // Expect to find the reference from the definition and for the time we instantiate it in the function.
+ references := findReferenceRangesByRangeOrResultSetID(w, r.ID)
+ if len(references) != 2 {
+ t.Fatalf("incorrect references count. want=%d have=%d", 2, len(references))
+ }
+
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
+ if len(monikers) != 1 {
+ t.Fatalf("incorrect references count. want=%d have=%d %+v", 2, len(monikers), monikers)
+ }
+
+ moniker := monikers[0]
+ identifier := moniker.Identifier
+
+ expectedIdentifier := "github.com/sourcegraph/lsif-go/internal/testdata:Outer.Inner"
+ if identifier != expectedIdentifier {
+ t.Fatalf("incorrect moniker identifier. want=%s have=%s", expectedIdentifier, identifier)
+ }
+ })
+
+ t.Run("check named import definition: non-'.' import", func(t *testing.T) {
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "named_import.go"), 4, 1)
+ if !ok {
+ t.Fatalf("could not find target range")
+ }
+
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, r.ID)
+ if len(definitions) != 2 {
+ t.Fatalf("Failed to get the correct definitions: %+v\n", definitions)
+ }
+
+ definition := definitions[0]
+ compareRange(t, definition, 4, 1, 4, 2)
+ })
+
+ t.Run("check named import reference: non-'.' import", func(t *testing.T) {
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "named_import.go"), 4, 4)
+ if !ok {
+ t.Fatalf("could not find target range")
+ }
+
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
+ if len(monikers) != 1 {
+ t.Fatalf("Failed to get the expected single moniker: %+v\n", monikers)
+ }
+
+ moniker := monikers[0]
+ identifier := moniker.Identifier
+
+ expectedIdentifier := "github.com/golang/go/std/net/http"
+ if identifier != expectedIdentifier {
+ t.Fatalf("incorrect moniker identifier. want=%s have=%s", expectedIdentifier, identifier)
+ }
+ })
+
+ t.Run("check named import definition: . import", func(t *testing.T) {
+ // There should be no range generated for the `.` in the import.
+ _, ok := findRange(w, "file://"+filepath.Join(projectRoot, "named_import.go"), 3, 1)
+ if ok {
+ t.Fatalf("could not find target range")
+ }
+ })
+
+ t.Run("check named import reference: . import", func(t *testing.T) {
+ r, ok := findRange(w, "file://"+filepath.Join(projectRoot, "named_import.go"), 3, 4)
+ if !ok {
+ t.Fatalf("could not find target range")
+ }
+
+ monikers := findMonikersByRangeOrReferenceResultID(w, r.ID)
+ if len(monikers) != 1 {
+ t.Fatalf("Failed to get the expected single moniker: %+v\n", monikers)
+ }
+
+ moniker := monikers[0]
+ identifier := moniker.Identifier
+
+ expectedIdentifier := "github.com/golang/go/std/fmt"
+ if identifier != expectedIdentifier {
+ t.Fatalf("incorrect moniker identifier. want=%s have=%s", expectedIdentifier, identifier)
+ }
+ })
+
+ t.Run("check external nested struct definition", func(t *testing.T) {
+ ranges := findAllRanges(w, "file://"+filepath.Join(projectRoot, "external_composite.go"), 5, 1)
+ if len(ranges) != 2 {
+ t.Fatalf("Incorrect number of ranges: %v", ranges)
+ }
+
+ sort.Slice(ranges, func(i, j int) bool {
+ return ranges[i].End.Character < ranges[j].End.Character
+ })
+
+ // line: http.Handler
+ // ^^^^------------ ranges[0], for http package reference
+ // ^^^^^^^^^^^^---- ranges[1], for http.Handler, the entire definition
+ //
+ // ^^^^^^^---- Separate range, for Handler reference
+ // See docs/structs.md
+ compareRange(t, ranges[0], 5, 1, 5, 5)
+ compareRange(t, ranges[1], 5, 1, 5, 13)
+
+ anonymousFieldRange := ranges[1]
+
+ definitions := findDefinitionRangesByRangeOrResultSetID(w, anonymousFieldRange.ID)
+ if len(definitions) != 1 {
+ t.Fatalf("incorrect definition count. want=%d have=%d %v", 1, len(definitions), definitions)
+ }
+
+ compareRange(t, definitions[0], 5, 1, 5, 13)
+
+ monikers := findMonikersByRangeOrReferenceResultID(w, anonymousFieldRange.ID)
+ if len(monikers) != 1 {
+ t.Fatalf("incorrect monikers count. want=%d have=%d %+v", 1, len(monikers), monikers)
+ }
+
+ moniker := monikers[0]
+ identifier := moniker.Identifier
+
+ expectedIdentifier := "github.com/sourcegraph/lsif-go/internal/testdata:NestedHandler.Handler"
+ if identifier != expectedIdentifier {
+ t.Fatalf("incorrect moniker identifier. want=%s have=%s", expectedIdentifier, identifier)
+ }
+
+ // Check to make sure that the http range still correctly links to the external package.
+ httpRange := ranges[0]
+ httpMonikers := findMonikersByRangeOrReferenceResultID(w, httpRange.ID)
+ if len(httpMonikers) != 1 {
+ t.Fatalf("incorrect http monikers count. want=%d have=%d %+v", 1, len(httpMonikers), httpMonikers)
+ }
+
+ httpIdentifier := httpMonikers[0].Identifier
+ expectedHttpIdentifier := "github.com/golang/go/std/net/http"
+ if httpIdentifier != expectedHttpIdentifier {
+ t.Fatalf("incorrect moniker identifier. want=%s have=%s", expectedHttpIdentifier, httpIdentifier)
+ }
})
}
@@ -387,7 +612,7 @@ func TestIndexer_documentation(t *testing.T) {
protocol.ToolInfo{Name: "lsif-go", Version: "dev"},
"testdata",
"0.0.1",
- nil,
+ dependencies,
writer.NewJSONWriter(&buf),
NewPackageDataCache(),
output.Options{},
@@ -415,16 +640,6 @@ func TestIndexer_documentation(t *testing.T) {
}
}
-func compareRange(t *testing.T, r protocol.Range, startLine, startCharacter, endLine, endCharacter int) {
- if r.Start.Line != startLine || r.Start.Character != startCharacter || r.End.Line != endLine || r.End.Character != endCharacter {
- t.Errorf(
- "incorrect range. want=[%d:%d,%d:%d) have=[%d:%d,%d:%d)",
- startLine, startCharacter, endLine, endCharacter,
- r.Start.Line, r.Start.Character, r.End.Line, r.End.Character,
- )
- }
-}
-
func TestIndexer_shouldVisitPackage(t *testing.T) {
w := &capturingWriter{}
projectRoot := getRepositoryRoot(t)
@@ -435,7 +650,7 @@ func TestIndexer_shouldVisitPackage(t *testing.T) {
protocol.ToolInfo{Name: "lsif-go", Version: "dev"},
"testdata",
"0.0.1",
- nil,
+ dependencies,
w,
NewPackageDataCache(),
output.Options{},
@@ -461,6 +676,7 @@ func TestIndexer_shouldVisitPackage(t *testing.T) {
"github.com/sourcegraph/lsif-go/internal/testdata/conflicting_test_symbols.test": false,
"github.com/sourcegraph/lsif-go/internal/testdata/duplicate_path_id": true,
"github.com/sourcegraph/lsif-go/internal/testdata/illegal_multiple_mains": true,
+ "github.com/sourcegraph/lsif-go/internal/testdata/cmd/minimal_main": true,
"…/secret": true,
"…/shouldvisit/notests": true,
"…/shouldvisit/tests": false,
@@ -471,3 +687,70 @@ func TestIndexer_shouldVisitPackage(t *testing.T) {
"…/shouldvisit/tests_separate_test […/shouldvisit/tests_separate.test]": true,
}).Equal(t, visited)
}
+
+func TestIndexer_findBestPackageDefinitionPath(t *testing.T) {
+ t.Run("Should find exact name match", func(t *testing.T) {
+ packageName := "smol"
+ possibleFilepaths := []DeclInfo{
+ {false, "smol.go"},
+ {false, "other.go"},
+ }
+
+ pkgDefinitionPath, _ := findBestPackageDefinitionPath(packageName, possibleFilepaths)
+ if pkgDefinitionPath != "smol.go" {
+ t.Errorf("incorrect hover text documentation. want=%q have=%q", "smol.go", pkgDefinitionPath)
+ }
+ })
+
+ t.Run("Should not pick _test files if package is not a test package", func(t *testing.T) {
+ packageName := "mylib"
+ possibleFilepaths := []DeclInfo{
+ {false, "smol.go"},
+ {false, "smol_test.go"},
+ }
+
+ pkgDefinitionPath, _ := findBestPackageDefinitionPath(packageName, possibleFilepaths)
+ if pkgDefinitionPath != "smol.go" {
+ t.Errorf("incorrect hover text documentation. want=%q have=%q", "smol.go", pkgDefinitionPath)
+ }
+ })
+
+ t.Run("should always pick whatever has the documentation", func(t *testing.T) {
+ packageName := "mylib"
+ possibleFilepaths := []DeclInfo{
+ {true, "smol.go"},
+ {false, "mylib.go"},
+ }
+
+ pkgDefinitionPath, _ := findBestPackageDefinitionPath(packageName, possibleFilepaths)
+ if pkgDefinitionPath != "smol.go" {
+ t.Errorf("incorrect hover text documentation. want=%q have=%q", "smol.go", pkgDefinitionPath)
+ }
+ })
+
+ t.Run("should pick a name that is a closer edit distance than one far away", func(t *testing.T) {
+ packageName := "http_router"
+ possibleFilepaths := []DeclInfo{
+ {false, "httprouter.go"},
+ {false, "httpother.go"},
+ }
+
+ pkgDefinitionPath, _ := findBestPackageDefinitionPath(packageName, possibleFilepaths)
+ if pkgDefinitionPath != "httprouter.go" {
+ t.Errorf("incorrect hover text documentation. want=%q have=%q", "smol.go", pkgDefinitionPath)
+ }
+ })
+
+ t.Run("should prefer test packages over other packages if the package name has test suffix", func(t *testing.T) {
+ packageName := "httprouter_test"
+ possibleFilepaths := []DeclInfo{
+ {false, "httprouter.go"},
+ {false, "http_test.go"},
+ }
+
+ pkgDefinitionPath, _ := findBestPackageDefinitionPath(packageName, possibleFilepaths)
+ if pkgDefinitionPath != "http_test.go" {
+ t.Errorf("incorrect hover text documentation. want=%q have=%q", "smol.go", pkgDefinitionPath)
+ }
+ })
+}
diff --git a/internal/indexer/info.go b/internal/indexer/info.go
index 780614d6..8a3dd581 100644
--- a/internal/indexer/info.go
+++ b/internal/indexer/info.go
@@ -24,6 +24,18 @@ type DocumentInfo struct {
m sync.Mutex
}
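+// appendDefinition adds a definition range ID to the document, guarding the slice with the
+// document's mutex.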
+func (document *DocumentInfo) appendDefinition(rangeID uint64) {
+ document.m.Lock()
+ document.DefinitionRangeIDs = append(document.DefinitionRangeIDs, rangeID)
+ document.m.Unlock()
+}
+
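+// appendReference adds a reference range ID to the document, guarding the slice with the
+// document's mutex.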
+func (document *DocumentInfo) appendReference(rangeID uint64) {
+ document.m.Lock()
+ document.ReferenceRangeIDs = append(document.ReferenceRangeIDs, rangeID)
+ document.m.Unlock()
+}
+
// DefinitionInfo provides context about a range that defines an identifier. An object
// of this shape is keyed by type and identifier in the indexer so that it can be
// re-retrieved for a range that uses the definition.
diff --git a/internal/indexer/moniker.go b/internal/indexer/moniker.go
index f52ef3e6..edf052e1 100644
--- a/internal/indexer/moniker.go
+++ b/internal/indexer/moniker.go
@@ -12,7 +12,7 @@ import (
// emitExportMoniker emits an export moniker for the given object linked to the given source
// identifier (either a range or a result set identifier). This will also emit links between
// the moniker vertex and the package information vertex representing the current module.
-func (i *Indexer) emitExportMoniker(sourceID uint64, p *packages.Package, obj types.Object) {
+func (i *Indexer) emitExportMoniker(sourceID uint64, p *packages.Package, obj ObjectLike) {
if i.moduleName == "" {
// Unknown dependencies, skip export monikers
return
@@ -53,7 +53,7 @@ func joinMonikerParts(parts ...string) string {
// identifier (either a range or a result set identifier). This will also emit links between
// the moniker vertex and the package information vertex representing the dependency containing
// the identifier.
-func (i *Indexer) emitImportMoniker(sourceID uint64, p *packages.Package, obj types.Object) {
+func (i *Indexer) emitImportMoniker(rangeID uint64, p *packages.Package, obj ObjectLike, document *DocumentInfo) bool {
pkg := makeMonikerPackage(obj)
monikerIdentifier := joinMonikerParts(pkg, makeMonikerIdentifier(i.packageDataCache, p, obj))
@@ -65,11 +65,14 @@ func (i *Indexer) emitImportMoniker(sourceID uint64, p *packages.Package, obj ty
// Lazily emit moniker vertex
monikerID := i.ensureImportMoniker(monikerIdentifier, packageInformationID)
- // Attach moniker to source element and stop after first match
- _ = i.emitter.EmitMonikerEdge(sourceID, monikerID)
- break
+ // Monikers will be linked during Indexer.linkImportMonikersToRanges
+ i.addImportMonikerReference(monikerID, rangeID, document.DocumentID)
+
+ return true
}
}
+
+ return false
}
// packagePrefixes returns all prefix of the go package path. For example, the package
@@ -136,10 +139,12 @@ func (i *Indexer) ensureImportMoniker(identifier string, packageInformationID ui
// makeMonikerPackage returns the package prefix used to construct a unique moniker for the given object.
// A full moniker has the form `{package prefix}:{identifier suffix}`.
-func makeMonikerPackage(obj types.Object) string {
+func makeMonikerPackage(obj ObjectLike) string {
var pkgName string
if v, ok := obj.(*types.PkgName); ok {
- pkgName = strings.Trim(v.Name(), `"`)
+ // Get the full import path of the package rather than just its name,
+ // e.g. "net/http" instead of "http".
+ pkgName = v.Imported().Path()
} else {
pkgName = obj.Pkg().Path()
}
@@ -150,12 +155,17 @@ func makeMonikerPackage(obj types.Object) string {
// makeMonikerIdentifier returns the identifier suffix used to construct a unique moniker for the given object.
// A full moniker has the form `{package prefix}:{identifier suffix}`. The identifier is meant to act as a
// qualified type path to the given object (e.g. `StructName.FieldName` or `StructName.MethodName`).
-func makeMonikerIdentifier(packageDataCache *PackageDataCache, p *packages.Package, obj types.Object) string {
+func makeMonikerIdentifier(packageDataCache *PackageDataCache, p *packages.Package, obj ObjectLike) string {
if _, ok := obj.(*types.PkgName); ok {
// Packages are identified uniquely by their package prefix
return ""
}
+ if _, ok := obj.(*PkgDeclaration); ok {
+ // Package declarations are identified uniquely by their package name
+ return ""
+ }
+
if v, ok := obj.(*types.Var); ok && v.IsField() {
if target := p.Imports[obj.Pkg().Path()]; target != nil {
p = target
diff --git a/internal/indexer/moniker_test.go b/internal/indexer/moniker_test.go
index 56d25d3c..996fd926 100644
--- a/internal/indexer/moniker_test.go
+++ b/internal/indexer/moniker_test.go
@@ -4,10 +4,12 @@ import (
"go/constant"
"go/token"
"go/types"
+ "sync"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/sourcegraph/lsif-go/internal/gomod"
+ "github.com/sourcegraph/sourcegraph/lib/codeintel/lsif/protocol"
"github.com/sourcegraph/sourcegraph/lib/codeintel/lsif/protocol/writer"
)
@@ -15,15 +17,16 @@ func TestEmitExportMoniker(t *testing.T) {
w := &capturingWriter{}
indexer := &Indexer{
- repositoryRemote: "github.com/sourcegraph/lsif-go",
- repositoryRoot: "/users/efritz/dev/sourcegraph/lsif-go",
- projectRoot: "/users/efritz/dev/sourcegraph/lsif-go",
- moduleName: "https://github.com/sourcegraph/lsif-go",
- moduleVersion: "3.14.159",
- emitter: writer.NewEmitter(w),
- importMonikerIDs: map[string]uint64{},
- packageInformationIDs: map[string]uint64{},
- stripedMutex: newStripedMutex(),
+ repositoryRemote: "github.com/sourcegraph/lsif-go",
+ repositoryRoot: "/users/efritz/dev/sourcegraph/lsif-go",
+ projectRoot: "/users/efritz/dev/sourcegraph/lsif-go",
+ moduleName: "https://github.com/sourcegraph/lsif-go",
+ moduleVersion: "3.14.159",
+ emitter: writer.NewEmitter(w),
+ importMonikerIDs: map[string]uint64{},
+ packageInformationIDs: map[string]uint64{},
+ importMonikerReferences: map[uint64]map[uint64]map[uint64]setVal{},
+ stripedMutex: newStripedMutex(),
}
object := types.NewConst(
@@ -36,7 +39,7 @@ func TestEmitExportMoniker(t *testing.T) {
indexer.emitExportMoniker(123, nil, object)
- monikers := findMonikersByRangeOrReferenceResultID(w.elements, 123)
+ monikers := findMonikersByRangeOrReferenceResultID(w, 123)
if monikers == nil || len(monikers) < 1 {
t.Fatalf("could not find moniker")
}
@@ -50,7 +53,7 @@ func TestEmitExportMoniker(t *testing.T) {
t.Errorf("incorrect moniker identifier. want=%q have=%q", "github.com/test/pkg:foobar", monikers[0].Identifier)
}
- packageInformation := findPackageInformationByMonikerID(w.elements, monikers[0].ID)
+ packageInformation := findPackageInformationByMonikerID(w, monikers[0].ID)
if monikers == nil || len(monikers) < 1 {
t.Fatalf("could not find package information")
}
@@ -66,15 +69,16 @@ func TestEmitExportMonikerPreGoMod(t *testing.T) {
w := &capturingWriter{}
indexer := &Indexer{
- repositoryRemote: "github.com/sourcegraph/lsif-go",
- repositoryRoot: "/users/efritz/dev/sourcegraph/lsif-go",
- projectRoot: "/users/efritz/dev/sourcegraph/lsif-go",
- moduleName: "https://github.com/sourcegraph/lsif-go",
- moduleVersion: "3.14.159",
- emitter: writer.NewEmitter(w),
- importMonikerIDs: map[string]uint64{},
- packageInformationIDs: map[string]uint64{},
- stripedMutex: newStripedMutex(),
+ repositoryRemote: "github.com/sourcegraph/lsif-go",
+ repositoryRoot: "/users/efritz/dev/sourcegraph/lsif-go",
+ projectRoot: "/users/efritz/dev/sourcegraph/lsif-go",
+ moduleName: "https://github.com/sourcegraph/lsif-go",
+ moduleVersion: "3.14.159",
+ emitter: writer.NewEmitter(w),
+ importMonikerIDs: map[string]uint64{},
+ packageInformationIDs: map[string]uint64{},
+ importMonikerReferences: map[uint64]map[uint64]map[uint64]setVal{},
+ stripedMutex: newStripedMutex(),
}
object := types.NewConst(
@@ -87,7 +91,7 @@ func TestEmitExportMonikerPreGoMod(t *testing.T) {
indexer.emitExportMoniker(123, nil, object)
- monikers := findMonikersByRangeOrReferenceResultID(w.elements, 123)
+ monikers := findMonikersByRangeOrReferenceResultID(w, 123)
if monikers == nil || len(monikers) < 1 {
t.Fatalf("could not find moniker")
}
@@ -101,7 +105,7 @@ func TestEmitExportMonikerPreGoMod(t *testing.T) {
t.Errorf("incorrect moniker identifier. want=%q have=%q", "github.com/sourcegraph/lsif-go/internal/git:InferRemote", monikers[0].Identifier)
}
- packageInformation := findPackageInformationByMonikerID(w.elements, monikers[0].ID)
+ packageInformation := findPackageInformationByMonikerID(w, monikers[0].ID)
if monikers == nil || len(monikers) < 1 {
t.Fatalf("could not find package information")
}
@@ -120,10 +124,12 @@ func TestEmitImportMoniker(t *testing.T) {
dependencies: map[string]gomod.GoModule{
"github.com/test/pkg/sub1": {Name: "github.com/test/pkg/sub1", Version: "1.2.3-deadbeef"},
},
- emitter: writer.NewEmitter(w),
- importMonikerIDs: map[string]uint64{},
- packageInformationIDs: map[string]uint64{},
- stripedMutex: newStripedMutex(),
+ emitter: writer.NewEmitter(w),
+ importMonikerIDs: map[string]uint64{},
+ packageInformationIDs: map[string]uint64{},
+ stripedMutex: newStripedMutex(),
+ importMonikerChannel: make(chan importMonikerReference, 1),
+ importMonikerReferences: map[uint64]map[uint64]map[uint64]setVal{},
}
object := types.NewConst(
@@ -134,31 +140,37 @@ func TestEmitImportMoniker(t *testing.T) {
constant.MakeBool(true),
)
- indexer.emitImportMoniker(123, nil, object)
+ wg := new(sync.WaitGroup)
+ indexer.startImportMonikerReferenceTracker(wg)
- monikers := findMonikersByRangeOrReferenceResultID(w.elements, 123)
- if monikers == nil || len(monikers) < 1 {
+ if !indexer.emitImportMoniker(123, nil, object, &DocumentInfo{DocumentID: 1}) {
+ t.Fatalf("Failed to emit import moniker")
+ }
+
+ // TODO: It would be nice not to hard-code the element indices, but this test does not exercise much beyond them.
+ moniker, ok := w.elements[1].(protocol.Moniker)
+ if !ok {
t.Fatalf("could not find moniker")
}
- if monikers[0].Kind != "import" {
- t.Errorf("incorrect moniker kind. want=%q have=%q", "import", monikers[0].Kind)
+ if moniker.Kind != "import" {
+ t.Errorf("incorrect moniker kind. want=%q have=%q", "import", moniker.Kind)
}
- if monikers[0].Scheme != "gomod" {
- t.Errorf("incorrect moniker scheme want=%q have=%q", "gomod", monikers[0].Scheme)
+ if moniker.Scheme != "gomod" {
+ t.Errorf("incorrect moniker scheme want=%q have=%q", "gomod", moniker.Scheme)
}
- if monikers[0].Identifier != "github.com/test/pkg/sub1/sub2/sub3:foobar" {
- t.Errorf("incorrect moniker identifier. want=%q have=%q", "github.com/test/pkg/sub1/sub2/sub3:foobar", monikers[0].Identifier)
+ if moniker.Identifier != "github.com/test/pkg/sub1/sub2/sub3:foobar" {
+ t.Errorf("incorrect moniker identifier. want=%q have=%q", "github.com/test/pkg/sub1/sub2/sub3:foobar", moniker.Identifier)
}
- packageInformation := findPackageInformationByMonikerID(w.elements, monikers[0].ID)
- if monikers == nil || len(monikers) < 1 {
+ packageInformation, ok := w.elements[0].(protocol.PackageInformation)
+ if !ok {
t.Fatalf("could not find package information")
}
- if packageInformation[0].Name != "github.com/test/pkg/sub1" {
- t.Errorf("incorrect moniker kind. want=%q have=%q", "github.com/test/pkg/sub1", monikers[0].Kind)
+ if packageInformation.Name != "github.com/test/pkg/sub1" {
+ t.Errorf("incorrect moniker kind. want=%q have=%q", "github.com/test/pkg/sub1", moniker.Kind)
}
- if packageInformation[0].Version != "1.2.3-deadbeef" {
- t.Errorf("incorrect moniker scheme want=%q have=%q", "1.2.3-deadbeef", monikers[0].Scheme)
+ if packageInformation.Version != "1.2.3-deadbeef" {
+ t.Errorf("incorrect moniker scheme want=%q have=%q", "1.2.3-deadbeef", moniker.Scheme)
}
}
diff --git a/internal/indexer/package_data_cache.go b/internal/indexer/package_data_cache.go
index 244f28b7..2e39cbe2 100644
--- a/internal/indexer/package_data_cache.go
+++ b/internal/indexer/package_data_cache.go
@@ -182,6 +182,11 @@ func updateMonikerPath(monikerPath []string, node ast.Node) []string {
return addString(monikerPath, name.Name)
}
+ // Handle embedded types that are selectors, like http.Client
+ if selector, ok := q.Type.(*ast.SelectorExpr); ok {
+ return addString(monikerPath, selector.Sel.Name)
+ }
+
case *ast.TypeSpec:
// Add the top-level type spec (e.g. `type X struct` and `type Y interface`)
return addString(monikerPath, q.Name.String())
@@ -215,7 +220,7 @@ func childrenOf(n ast.Node) (children []ast.Node) {
}
// isField returns true if the given object is a field.
-func isField(obj types.Object) bool {
+func isField(obj ObjectLike) bool {
if v, ok := obj.(*types.Var); ok && v.IsField() {
return true
}
@@ -226,7 +231,7 @@ func isField(obj types.Object) bool {
// is similar but distinct from the set of types from which we _extract_ hover text. See canExtractHoverText
// for those types. This function returns true for the set of objects for which we actually call the methods
// findHoverContents or findExternalHoverContents (see hover.go).
-func shouldHaveHoverText(obj types.Object) bool {
+func shouldHaveHoverText(obj ObjectLike) bool {
switch obj.(type) {
case *types.Const:
return true
diff --git a/internal/indexer/protocol.go b/internal/indexer/protocol.go
index 53288046..80808ae7 100644
--- a/internal/indexer/protocol.go
+++ b/internal/indexer/protocol.go
@@ -15,7 +15,7 @@ const languageGo = "go"
// rangeForObject transforms the position of the given object (1-indexed) into an LSP range
// (0-indexed). If the object is a quoted package name, the leading and trailing quotes are
// stripped from the resulting range's bounds.
-func rangeForObject(obj types.Object, pos token.Position) (protocol.Pos, protocol.Pos) {
+func rangeForObject(obj ObjectLike, pos token.Position) (protocol.Pos, protocol.Pos) {
adjustment := 0
if pkgName, ok := obj.(*types.PkgName); ok && strings.HasPrefix(pkgName.Name(), `"`) {
adjustment = 1
diff --git a/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd.json b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd.json
new file mode 100755
index 00000000..b828ee47
--- /dev/null
+++ b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd.json
@@ -0,0 +1,22 @@
+{
+ "pathID": "/cmd",
+ "documentation": {
+ "identifier": "cmd",
+ "newPage": true,
+ "searchKey": "",
+ "tags": []
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": ""
+ },
+ "detail": {
+ "kind": "plaintext",
+ "value": ""
+ },
+ "children": [
+ {
+ "pathID": "/cmd/minimal_main"
+ }
+ ]
+}
diff --git a/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd.md b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd.md
new file mode 100755
index 00000000..389ac912
--- /dev/null
+++ b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd.md
@@ -0,0 +1,8 @@
+#
+
+## Index
+
+* Subpages
+ * [cmd/minimal_main](cmd/minimal_main.md)
+
+
diff --git a/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd/minimal_main.json b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd/minimal_main.json
new file mode 100755
index 00000000..f90845ca
--- /dev/null
+++ b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd/minimal_main.json
@@ -0,0 +1,135 @@
+{
+ "pathID": "/cmd/minimal_main",
+ "documentation": {
+ "identifier": "minimal_main",
+ "newPage": true,
+ "searchKey": "cmd/minimal_main",
+ "tags": [
+ "private",
+ "package"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "Package main"
+ },
+ "detail": {
+ "kind": "markdown",
+ "value": ""
+ },
+ "children": [
+ {
+ "node": {
+ "pathID": "/cmd/minimal_main#type",
+ "documentation": {
+ "identifier": "type",
+ "newPage": false,
+ "searchKey": "",
+ "tags": [
+ "private"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "Types"
+ },
+ "detail": {
+ "kind": "plaintext",
+ "value": ""
+ },
+ "children": [
+ {
+ "node": {
+ "pathID": "/cmd/minimal_main#User",
+ "documentation": {
+ "identifier": "User",
+ "newPage": false,
+ "searchKey": "main.User",
+ "tags": [
+ "struct"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "type User struct"
+ },
+ "detail": {
+ "kind": "markdown",
+ "value": "```Go\ntype User struct {\n\tId, Name string\n}\n```\n\n"
+ },
+ "children": null
+ }
+ },
+ {
+ "node": {
+ "pathID": "/cmd/minimal_main#UserResource",
+ "documentation": {
+ "identifier": "UserResource",
+ "newPage": false,
+ "searchKey": "main.UserResource",
+ "tags": [
+ "struct"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "type UserResource struct{}"
+ },
+ "detail": {
+ "kind": "markdown",
+ "value": "```Go\ntype UserResource struct{}\n```\n\n"
+ },
+ "children": null
+ }
+ }
+ ]
+ }
+ },
+ {
+ "node": {
+ "pathID": "/cmd/minimal_main#func",
+ "documentation": {
+ "identifier": "func",
+ "newPage": false,
+ "searchKey": "",
+ "tags": [
+ "private"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "Functions"
+ },
+ "detail": {
+ "kind": "plaintext",
+ "value": ""
+ },
+ "children": [
+ {
+ "node": {
+ "pathID": "/cmd/minimal_main#main",
+ "documentation": {
+ "identifier": "main",
+ "newPage": false,
+ "searchKey": "main.main",
+ "tags": [
+ "function",
+ "private"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "func main()"
+ },
+ "detail": {
+ "kind": "markdown",
+ "value": "```Go\nfunc main()\n```\n\n"
+ },
+ "children": null
+ }
+ }
+ ]
+ }
+ }
+ ]
+}
diff --git a/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd/minimal_main.md b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd/minimal_main.md
new file mode 100755
index 00000000..8dd0b12e
--- /dev/null
+++ b/internal/indexer/testdata/TestIndexer_documentation/testdata/cmd/minimal_main.md
@@ -0,0 +1,58 @@
+# Package main
+
+## Index
+
+* [Types](#type)
+ * [type User struct](#User)
+ * [type UserResource struct{}](#UserResource)
+* [Functions](#func)
+ * [func main()](#main)
+
+
+## Types
+
+```
+tags: [private]
+```
+
+### type User struct
+
+```
+searchKey: main.User
+tags: [struct]
+```
+
+```Go
+type User struct {
+ Id, Name string
+}
+```
+
+### type UserResource struct{}
+
+```
+searchKey: main.UserResource
+tags: [struct]
+```
+
+```Go
+type UserResource struct{}
+```
+
+## Functions
+
+```
+tags: [private]
+```
+
+### func main()
+
+```
+searchKey: main.main
+tags: [function private]
+```
+
+```Go
+func main()
+```
+
diff --git a/internal/indexer/testdata/TestIndexer_documentation/testdata/index.json b/internal/indexer/testdata/TestIndexer_documentation/testdata/index.json
index ac408ab5..08226c1b 100755
--- a/internal/indexer/testdata/TestIndexer_documentation/testdata/index.json
+++ b/internal/indexer/testdata/TestIndexer_documentation/testdata/index.json
@@ -15,9 +15,12 @@
},
"detail": {
"kind": "markdown",
- "value": "Package testdata \n\ntestdata is a small package containing sample Go source code used for testing the indexing routines of github.com/sourcegraph/lsif-go. \n\n"
+ "value": "testdata is a small package containing sample Go source code used for testing the indexing routines of github.com/sourcegraph/lsif-go. \n\n"
},
"children": [
+ {
+ "pathID": "/cmd"
+ },
{
"pathID": "/internal"
},
@@ -639,6 +642,28 @@
]
}
},
+ {
+ "node": {
+ "pathID": "/#NestedHandler",
+ "documentation": {
+ "identifier": "NestedHandler",
+ "newPage": false,
+ "searchKey": "testdata.NestedHandler",
+ "tags": [
+ "struct"
+ ]
+ },
+ "label": {
+ "kind": "plaintext",
+ "value": "type NestedHandler struct"
+ },
+ "detail": {
+ "kind": "markdown",
+ "value": "```Go\ntype NestedHandler struct {\n\thttp.Handler\n\tOther int\n}\n```\n\n"
+ },
+ "children": null
+ }
+ },
{
"node": {
"pathID": "/#Outer",
@@ -1040,67 +1065,66 @@
"children": [
{
"node": {
- "pathID": "/#Parallel",
+ "pathID": "/#Example",
"documentation": {
- "identifier": "Parallel",
+ "identifier": "Example",
"newPage": false,
- "searchKey": "testdata.Parallel",
+ "searchKey": "testdata.Example",
"tags": [
"function"
]
},
"label": {
"kind": "plaintext",
- "value": "func Parallel(ctx context.Context, fns ...ParallelizableFunc) error"
+ "value": "func Example()"
},
"detail": {
"kind": "markdown",
- "value": "```Go\nfunc Parallel(ctx context.Context, fns ...ParallelizableFunc) error\n```\n\nParallel invokes each of the given parallelizable functions in their own goroutines and returns the first error to occur. This method will block until all goroutines have returned. \n\n"
+ "value": "```Go\nfunc Example()\n```\n\n"
},
"children": null
}
},
{
"node": {
- "pathID": "/#Switch",
+ "pathID": "/#Parallel",
"documentation": {
- "identifier": "Switch",
+ "identifier": "Parallel",
"newPage": false,
- "searchKey": "testdata.Switch",
+ "searchKey": "testdata.Parallel",
"tags": [
"function"
]
},
"label": {
"kind": "plaintext",
- "value": "func Switch(interfaceValue interface{}) bool"
+ "value": "func Parallel(ctx context.Context, fns ...ParallelizableFunc) error"
},
"detail": {
"kind": "markdown",
- "value": "```Go\nfunc Switch(interfaceValue interface{}) bool\n```\n\n"
+ "value": "```Go\nfunc Parallel(ctx context.Context, fns ...ParallelizableFunc) error\n```\n\nParallel invokes each of the given parallelizable functions in their own goroutines and returns the first error to occur. This method will block until all goroutines have returned. \n\n"
},
"children": null
}
},
{
"node": {
- "pathID": "/#main",
+ "pathID": "/#Switch",
"documentation": {
- "identifier": "main",
+ "identifier": "Switch",
"newPage": false,
- "searchKey": "testdata.main",
+ "searchKey": "testdata.Switch",
"tags": [
- "function",
- "private"
+ "function"
]
},
"label": {
"kind": "plaintext",
- "value": "func main()"
+ "value": "func Switch(interfaceValue interface{}) bool"
},
"detail": {
"kind": "markdown",
- "value": "```Go\nfunc main()\n```\n\n"
+ "value": "```Go\nfunc Switch(interfaceValue interface{}) bool\n```\n\n"
},
"children": null
}
diff --git a/internal/indexer/testdata/TestIndexer_documentation/testdata/index.md b/internal/indexer/testdata/TestIndexer_documentation/testdata/index.md
index c12016d9..7d059292 100755
--- a/internal/indexer/testdata/TestIndexer_documentation/testdata/index.md
+++ b/internal/indexer/testdata/TestIndexer_documentation/testdata/index.md
@@ -1,12 +1,11 @@
# Package testdata
-Package testdata
-
testdata is a small package containing sample Go source code used for testing the indexing routines of github.com/sourcegraph/lsif-go.
## Index
* Subpages
+ * [cmd](cmd.md)
* [internal](internal.md)
* [conflicting_test_symbols](conflicting_test_symbols.md)
* [duplicate_path_id](duplicate_path_id.md)
@@ -38,6 +37,7 @@ testdata is a small package containing sample Go source code used for testing th
* [type InnerStruct struct{}](#InnerStruct)
* [type Interface interface](#Interface)
* [func NewInterface() Interface](#NewInterface)
+ * [type NestedHandler struct](#NestedHandler)
* [type Outer struct](#Outer)
* [type ParallelizableFunc func(ctx context.Context) error](#ParallelizableFunc)
* [type SecretBurger secret.Burger](#SecretBurger)
@@ -56,9 +56,9 @@ testdata is a small package containing sample Go source code used for testing th
* [type X struct](#X)
* [type Y struct](#Y)
* [Functions](#func)
+ * [func Example()](#Example)
* [func Parallel(ctx context.Context, fns ...ParallelizableFunc) error](#Parallel)
* [func Switch(interfaceValue interface{}) bool](#Switch)
- * [func main()](#main)
* [func useOfCompositeStructs()](#useOfCompositeStructs)
@@ -402,6 +402,20 @@ tags: [function]
func NewInterface() Interface
```
+### type NestedHandler struct
+
+```
+searchKey: testdata.NestedHandler
+tags: [struct]
+```
+
+```Go
+type NestedHandler struct {
+ http.Handler
+ Other int
+}
+```
+
### type Outer struct
```
@@ -666,6 +680,17 @@ Go can be fun
tags: [private]
```
+### func Example()
+
+```
+searchKey: testdata.Example
+tags: [function]
+```
+
+```Go
+func Example()
+```
+
### func Parallel(ctx context.Context, fns ...ParallelizableFunc) error
```
@@ -690,17 +715,6 @@ tags: [function]
func Switch(interfaceValue interface{}) bool
```
-### func main()
-
-```
-searchKey: testdata.main
-tags: [function private]
-```
-
-```Go
-func main()
-```
-
### func useOfCompositeStructs()
```
diff --git a/internal/indexer/types.go b/internal/indexer/types.go
new file mode 100644
index 00000000..add74069
--- /dev/null
+++ b/internal/indexer/types.go
@@ -0,0 +1,74 @@
+package indexer
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/packages"
+)
+
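+// importReference pairs a reference range with the document that contains it.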
+type importReference struct {
+ rangeID uint64
+ documentID uint64
+}
+
+// ObjectLike is effectively just types.Object. We needed an interface we could actually implement:
+// types.Object has unexported methods, so it cannot be implemented outside of go/types.
+type ObjectLike interface {
+ Pos() token.Pos
+ Pkg() *types.Package
+ Name() string
+ Type() types.Type
+ Exported() bool
+ Id() string
+
+ String() string
+}
+
+// PkgDeclaration is similar to types.PkgName, except that it describes _declared_ packages
+// rather than _imported_ packages.
+//
+// Generated for: `package name`
+//
+// For more information, see: docs/package_declarations.md
+type PkgDeclaration struct {
+ pos token.Pos
+ pkg *types.Package
+ name string
+}
+
+func (p PkgDeclaration) Pos() token.Pos { return p.pos }
+func (p PkgDeclaration) Pkg() *types.Package { return p.pkg }
+func (p PkgDeclaration) Name() string { return p.name }
+func (p PkgDeclaration) Type() types.Type { return pkgDeclarationType{p} }
+func (p PkgDeclaration) Exported() bool { return true }
+func (p PkgDeclaration) Id() string { return "pkg:" + p.pkg.Name() + ":" + p.name }
+func (p PkgDeclaration) String() string { return "pkg:" + p.pkg.Name() + ":" + p.name }
+
+// Fulfills types.Type interface
+type pkgDeclarationType struct{ decl PkgDeclaration }
+
+func (p pkgDeclarationType) Underlying() types.Type { return p }
+func (p pkgDeclarationType) String() string { return p.decl.Id() }
+
+var packageLen = len("package ")
+
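+// newPkgDeclaration constructs a synthetic PkgDeclaration for the file's `package <name>` clause
+// and returns it together with the position of the package name identifier.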
+func newPkgDeclaration(p *packages.Package, f *ast.File) (*PkgDeclaration, token.Position) {
+ // package mypackage
+ // ^--------------------- pkgKeywordPosition token.Position
+ // ^-------------- pkgDeclarationPos token.Pos
+ // ^-------------- pkgPosition token.Position
+ pkgKeywordPosition := p.Fset.Position(f.Package)
+
+ pkgDeclarationPos := p.Fset.File(f.Package).Pos(pkgKeywordPosition.Offset + packageLen)
+ pkgPosition := p.Fset.Position(pkgDeclarationPos)
+
+ name := f.Name.Name
+
+ return &PkgDeclaration{
+ pos: pkgDeclarationPos,
+ pkg: types.NewPackage(p.PkgPath, name),
+ name: name,
+ }, pkgPosition
+}
diff --git a/internal/indexer/typestring.go b/internal/indexer/typestring.go
index c6854988..4a9ca429 100644
--- a/internal/indexer/typestring.go
+++ b/internal/indexer/typestring.go
@@ -10,8 +10,8 @@ import (
// indent is used to format struct fields.
const indent = " "
-// typeString returns the string representation fo the given object's type.
-func typeString(obj types.Object) (signature string, extra string) {
+// typeString returns the string representation of the given object's type.
+func typeString(obj ObjectLike) (signature string, extra string) {
switch v := obj.(type) {
case *types.PkgName:
return fmt.Sprintf("package %s", v.Name()), ""
@@ -26,10 +26,20 @@ func typeString(obj types.Object) (signature string, extra string) {
}
case *types.Const:
- return fmt.Sprintf("%s = %s", types.ObjectString(obj, packageQualifier), v.Val()), ""
+ return fmt.Sprintf("%s = %s", types.ObjectString(v, packageQualifier), v.Val()), ""
+
+ case *PkgDeclaration:
+ return fmt.Sprintf("package %s", v.name), ""
+
}
- return types.ObjectString(obj, packageQualifier), ""
+ // Fall back to types.Object.
+ // Every other ObjectLike we construct is a types.Object; we only had to implement PkgDeclaration
+ // because types.Object cannot be implemented outside of go/types.
+ //
+ // We expect any new ObjectLike implementations to be `types.Object` values as well.
+ v, _ := obj.(types.Object)
+ return types.ObjectString(v, packageQualifier), ""
}
// packageQualifier returns an empty string in order to remove the leading package
diff --git a/internal/testdata/child_symbols.go b/internal/testdata/child_symbols.go
index 4cd3e398..f17effc6 100644
--- a/internal/testdata/child_symbols.go
+++ b/internal/testdata/child_symbols.go
@@ -1,4 +1,3 @@
-// Package testdata
package testdata
// Const is a constant equal to 5. It's the best constant I've ever written. 😹
diff --git a/internal/testdata/minimal_main.go b/internal/testdata/cmd/minimal_main/minimal_main.go
similarity index 100%
rename from internal/testdata/minimal_main.go
rename to internal/testdata/cmd/minimal_main/minimal_main.go
diff --git a/internal/testdata/external_composite.go b/internal/testdata/external_composite.go
new file mode 100644
index 00000000..b441a441
--- /dev/null
+++ b/internal/testdata/external_composite.go
@@ -0,0 +1,8 @@
+package testdata
+
+import "net/http"
+
+type NestedHandler struct {
+ http.Handler
+ Other int
+}
diff --git a/internal/testdata/go.mod b/internal/testdata/go.mod
index 03346016..b0fa8571 100644
--- a/internal/testdata/go.mod
+++ b/internal/testdata/go.mod
@@ -1,3 +1,3 @@
module github.com/sourcegraph/lsif-go/internal/testdata
-go 1.12
+go 1.16
diff --git a/internal/testdata/named_import.go b/internal/testdata/named_import.go
new file mode 100644
index 00000000..d2f9806b
--- /dev/null
+++ b/internal/testdata/named_import.go
@@ -0,0 +1,10 @@
+package testdata
+
+import (
+ . "fmt"
+ h "net/http"
+)
+
+func Example() {
+ Println(h.CanonicalHeaderKey("accept-encoding"))
+}