diff --git a/langserver/completion.go b/langserver/completion.go new file mode 100644 index 00000000..75e1b35a --- /dev/null +++ b/langserver/completion.go @@ -0,0 +1,79 @@ +package langserver + +import ( + "context" + "fmt" + + "github.com/sourcegraph/go-langserver/langserver/internal/gocode" + "github.com/sourcegraph/go-langserver/pkg/lsp" + "github.com/sourcegraph/jsonrpc2" +) + +var ( + GocodeCompletionEnabled = false + CIKConstantSupported = lsp.CIKVariable // or lsp.CIKConstant if client supported +) + +func (h *LangHandler) handleTextDocumentCompletion(ctx context.Context, conn jsonrpc2.JSONRPC2, req *jsonrpc2.Request, params lsp.CompletionParams) (*lsp.CompletionList, error) { + if !isFileURI(params.TextDocument.URI) { + return nil, &jsonrpc2.Error{ + Code: jsonrpc2.CodeInvalidParams, + Message: fmt.Sprintf("textDocument/completion not yet supported for out-of-workspace URI (%q)", params.TextDocument.URI), + } + } + + // In the case of testing, our OS paths and VFS paths do not match. In the + // real world, this is never the case. Give the test suite the opportunity + // to correct the path now. + vfsURI := params.TextDocument.URI + if testOSToVFSPath != nil { + vfsURI = pathToURI(testOSToVFSPath(uriToFilePath(vfsURI))) + } + + // Read file contents and calculate byte offset. 
+ contents, err := h.readFile(ctx, vfsURI) + if err != nil { + return nil, err + } + filename := h.FilePath(params.TextDocument.URI) + offset, valid, why := offsetForPosition(contents, params.Position) + if !valid { + return nil, fmt.Errorf("invalid position: %s:%d:%d (%s)", filename, params.Position.Line, params.Position.Character, why) + } + + ca, rangelen := gocode.AutoComplete(contents, filename, offset) + citems := make([]lsp.CompletionItem, len(ca)) + for i, it := range ca { + var kind lsp.CompletionItemKind + switch it.Class.String() { + case "const": + kind = CIKConstantSupported + case "func": + kind = lsp.CIKFunction + case "import": + kind = lsp.CIKModule + case "package": + kind = lsp.CIKModule + case "type": + kind = lsp.CIKClass + case "var": + kind = lsp.CIKVariable + } + citems[i] = lsp.CompletionItem{ + Label: it.Name, + Kind: kind, + Detail: it.Type, + TextEdit: &lsp.TextEdit{ + Range: lsp.Range{ + Start: lsp.Position{Line: params.Position.Line, Character: params.Position.Character - rangelen}, + End: lsp.Position{Line: params.Position.Line, Character: params.Position.Character}, + }, + NewText: it.Name, + }, + } + } + return &lsp.CompletionList{ + IsIncomplete: false, + Items: citems, + }, nil +} diff --git a/langserver/handler.go b/langserver/handler.go index 6ce53211..7070498b 100644 --- a/langserver/handler.go +++ b/langserver/handler.go @@ -15,6 +15,7 @@ import ( opentracing "github.com/opentracing/opentracing-go" "github.com/opentracing/opentracing-go/ext" + "github.com/sourcegraph/go-langserver/langserver/internal/gocode" "github.com/sourcegraph/go-langserver/pkg/lsp" "github.com/sourcegraph/go-langserver/pkg/lspext" "github.com/sourcegraph/jsonrpc2" @@ -72,6 +73,13 @@ type LangHandler struct { // reset clears all internal state in h. 
func (h *LangHandler) reset(init *InitializeParams) error { + for _, k := range init.Capabilities.TextDocument.Completion.CompletionItemKind.ValueSet { + if k == lsp.CIKConstant { + CIKConstantSupported = lsp.CIKConstant + break + } + } + if isFileURI(lsp.DocumentURI(init.InitializeParams.RootPath)) { log.Printf("Passing an initialize rootPath URI (%q) is deprecated. Use rootUri instead.", init.InitializeParams.RootPath) } @@ -189,6 +197,9 @@ func (h *LangHandler) Handle(ctx context.Context, conn jsonrpc2.JSONRPC2, req *j if err := h.reset(¶ms); err != nil { return nil, err } + if GocodeCompletionEnabled { + gocode.InitDaemon(h.BuildContext(ctx)) + } // PERF: Kick off a workspace/symbol in the background to warm up the server if yes, _ := strconv.ParseBool(envWarmupOnInitialize); yes { @@ -203,11 +214,16 @@ func (h *LangHandler) Handle(ctx context.Context, conn jsonrpc2.JSONRPC2, req *j } kind := lsp.TDSKIncremental + var completionOp *lsp.CompletionOptions + if GocodeCompletionEnabled { + completionOp = &lsp.CompletionOptions{TriggerCharacters: []string{"."}} + } return lsp.InitializeResult{ Capabilities: lsp.ServerCapabilities{ TextDocumentSync: lsp.TextDocumentSyncOptionsOrKind{ Kind: &kind, }, + CompletionProvider: completionOp, DefinitionProvider: true, DocumentFormattingProvider: true, DocumentSymbolProvider: true, @@ -284,6 +300,16 @@ func (h *LangHandler) Handle(ctx context.Context, conn jsonrpc2.JSONRPC2, req *j } return h.handleXDefinition(ctx, conn, req, params) + case "textDocument/completion": + if req.Params == nil { + return nil, &jsonrpc2.Error{Code: jsonrpc2.CodeInvalidParams} + } + var params lsp.CompletionParams + if err := json.Unmarshal(*req.Params, ¶ms); err != nil { + return nil, err + } + return h.handleTextDocumentCompletion(ctx, conn, req, params) + case "textDocument/references": if req.Params == nil { return nil, &jsonrpc2.Error{Code: jsonrpc2.CodeInvalidParams} diff --git a/langserver/internal/gocode/.gitignore 
b/langserver/internal/gocode/.gitignore new file mode 100644 index 00000000..106b25b4 --- /dev/null +++ b/langserver/internal/gocode/.gitignore @@ -0,0 +1,12 @@ +*.8 +*.a +*.out +gocode +gocode.exe +goremote +gocodetest +*.swp +listidents +showcursor +showsmap +rename diff --git a/langserver/internal/gocode/LICENSE b/langserver/internal/gocode/LICENSE new file mode 100644 index 00000000..5ce0cee7 --- /dev/null +++ b/langserver/internal/gocode/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2010 nsf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/langserver/internal/gocode/autocompletecontext.go b/langserver/internal/gocode/autocompletecontext.go new file mode 100644 index 00000000..c1592e36 --- /dev/null +++ b/langserver/internal/gocode/autocompletecontext.go @@ -0,0 +1,793 @@ +package gocode + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "runtime" + "sort" + "strings" + "time" +) + +//------------------------------------------------------------------------- +// out_buffers +// +// Temporary structure for writing autocomplete response. +//------------------------------------------------------------------------- + +// fields must be exported for RPC +type candidate struct { + Name string + Type string + Class decl_class + Package string +} + +type out_buffers struct { + tmpbuf *bytes.Buffer + candidates []candidate + canonical_aliases map[string]string + ctx *auto_complete_context + tmpns map[string]bool + ignorecase bool +} + +func new_out_buffers(ctx *auto_complete_context) *out_buffers { + b := new(out_buffers) + b.tmpbuf = bytes.NewBuffer(make([]byte, 0, 1024)) + b.candidates = make([]candidate, 0, 64) + b.ctx = ctx + b.canonical_aliases = make(map[string]string) + for _, imp := range b.ctx.current.packages { + b.canonical_aliases[imp.abspath] = imp.alias + } + return b +} + +func (b *out_buffers) Len() int { + return len(b.candidates) +} + +func (b *out_buffers) Less(i, j int) bool { + x := b.candidates[i] + y := b.candidates[j] + if x.Class == y.Class { + return x.Name < y.Name + } + return x.Class < y.Class +} + +func (b *out_buffers) Swap(i, j int) { + b.candidates[i], b.candidates[j] = b.candidates[j], b.candidates[i] +} + +func (b *out_buffers) append_decl(p, name, pkg string, decl *decl, class decl_class) { + c1 := !g_config.ProposeBuiltins && decl.scope == g_universe_scope && decl.name != "Error" + c2 := class != decl_invalid && decl.class != class + c3 := class == decl_invalid && !has_prefix(name, p, b.ignorecase) + c4 := 
!decl.matches() + c5 := !check_type_expr(decl.typ) + + if c1 || c2 || c3 || c4 || c5 { + return + } + + decl.pretty_print_type(b.tmpbuf, b.canonical_aliases) + b.candidates = append(b.candidates, candidate{ + Name: name, + Type: b.tmpbuf.String(), + Class: decl.class, + Package: pkg, + }) + b.tmpbuf.Reset() +} + +func (b *out_buffers) append_embedded(p string, decl *decl, pkg string, class decl_class) { + if decl.embedded == nil { + return + } + + first_level := false + if b.tmpns == nil { + // first level, create tmp namespace + b.tmpns = make(map[string]bool) + first_level = true + + // add all children of the current decl to the namespace + for _, c := range decl.children { + b.tmpns[c.name] = true + } + } + + for _, emb := range decl.embedded { + typedecl := type_to_decl(emb, decl.scope) + if typedecl == nil { + continue + } + + // could be type alias + if typedecl.is_alias() { + typedecl = typedecl.type_dealias() + } + + // prevent infinite recursion here + if typedecl.is_visited() { + continue + } + typedecl.set_visited() + defer typedecl.clear_visited() + + for _, c := range typedecl.children { + if _, has := b.tmpns[c.name]; has { + continue + } + b.append_decl(p, c.name, pkg, c, class) + b.tmpns[c.name] = true + } + b.append_embedded(p, typedecl, pkg, class) + } + + if first_level { + // remove tmp namespace + b.tmpns = nil + } +} + +//------------------------------------------------------------------------- +// auto_complete_context +// +// Context that holds cache structures for autocompletion needs. It +// includes cache for packages and for main package files. 
+//------------------------------------------------------------------------- + +type auto_complete_context struct { + current *auto_complete_file // currently edited file + others []*decl_file_cache // other files of the current package + pkg *scope + + pcache package_cache // packages cache + declcache *decl_cache // top-level declarations cache +} + +func new_auto_complete_context(pcache package_cache, declcache *decl_cache) *auto_complete_context { + c := new(auto_complete_context) + c.current = new_auto_complete_file("", declcache.context) + c.pcache = pcache + c.declcache = declcache + return c +} + +func (c *auto_complete_context) update_caches() { + // temporary map for packages that we need to check for a cache expiration + // map is used as a set of unique items to prevent double checks + ps := make(map[string]*package_file_cache) + + // collect import information from all of the files + c.pcache.append_packages(ps, c.current.packages) + c.others = get_other_package_files(c.current.name, c.current.package_name, c.declcache) + for _, other := range c.others { + c.pcache.append_packages(ps, other.packages) + } + + update_packages(ps) + + // fix imports for all files + fixup_packages(c.current.filescope, c.current.packages, c.pcache) + for _, f := range c.others { + fixup_packages(f.filescope, f.packages, c.pcache) + } + + // At this point we have collected all top level declarations, now we need to + // merge them in the common package block. 
+ c.merge_decls() +} + +func (c *auto_complete_context) merge_decls() { + c.pkg = new_scope(g_universe_scope) + merge_decls(c.current.filescope, c.pkg, c.current.decls) + merge_decls_from_packages(c.pkg, c.current.packages, c.pcache) + for _, f := range c.others { + merge_decls(f.filescope, c.pkg, f.decls) + merge_decls_from_packages(c.pkg, f.packages, c.pcache) + } + + // special pass for type aliases which also have methods, while this is + // valid code, it shouldn't happen a lot in practice, so, whatever + // let's move all type alias methods to their first non-alias type down in + // the chain + propagate_type_alias_methods(c.pkg) +} + +func (c *auto_complete_context) make_decl_set(scope *scope) map[string]*decl { + set := make(map[string]*decl, len(c.pkg.entities)*2) + make_decl_set_recursive(set, scope) + return set +} + +func (c *auto_complete_context) get_candidates_from_set(set map[string]*decl, partial string, class decl_class, b *out_buffers) { + for key, value := range set { + if value == nil { + continue + } + value.infer_type() + pkgname := "" + if pkg, ok := c.pcache[value.name]; ok { + pkgname = pkg.import_name + } + b.append_decl(partial, key, pkgname, value, class) + } +} + +func (c *auto_complete_context) get_candidates_from_decl_alias(cc cursor_context, class decl_class, b *out_buffers) { + if cc.decl.is_visited() { + return + } + + cc.decl = cc.decl.type_dealias() + if cc.decl == nil { + return + } + + cc.decl.set_visited() + defer cc.decl.clear_visited() + + c.get_candidates_from_decl(cc, class, b) + return +} + +func (c *auto_complete_context) decl_package_import_path(decl *decl) string { + if decl == nil || decl.scope == nil { + return "" + } + if pkg, ok := c.pcache[decl.scope.pkgname]; ok { + return pkg.import_name + } + return "" +} + +func (c *auto_complete_context) get_candidates_from_decl(cc cursor_context, class decl_class, b *out_buffers) { + if cc.decl.is_alias() { + c.get_candidates_from_decl_alias(cc, class, b) + return + } + + 
// propose all children of a subject declaration and + for _, decl := range cc.decl.children { + if cc.decl.class == decl_package && !ast.IsExported(decl.name) { + continue + } + if cc.struct_field { + // if we're autocompleting struct field init, skip all methods + if _, ok := decl.typ.(*ast.FuncType); ok { + continue + } + } + b.append_decl(cc.partial, decl.name, c.decl_package_import_path(decl), decl, class) + } + // propose all children of an underlying struct/interface type + adecl := advance_to_struct_or_interface(cc.decl) + if adecl != nil && adecl != cc.decl { + for _, decl := range adecl.children { + if decl.class == decl_var { + b.append_decl(cc.partial, decl.name, c.decl_package_import_path(decl), decl, class) + } + } + } + // propose all children of its embedded types + b.append_embedded(cc.partial, cc.decl, c.decl_package_import_path(cc.decl), class) +} + +func (c *auto_complete_context) get_import_candidates(partial string, b *out_buffers) { + currentPackagePath, pkgdirs := g_daemon.context.pkg_dirs() + resultSet := map[string]struct{}{} + for _, pkgdir := range pkgdirs { + // convert srcpath to pkgpath and get candidates + get_import_candidates_dir(pkgdir, filepath.FromSlash(partial), b.ignorecase, currentPackagePath, resultSet) + } + for k := range resultSet { + b.candidates = append(b.candidates, candidate{Name: k, Class: decl_import}) + } +} + +func get_import_candidates_dir(root, partial string, ignorecase bool, currentPackagePath string, r map[string]struct{}) { + var fpath string + var match bool + if strings.HasSuffix(partial, "/") { + fpath = filepath.Join(root, partial) + } else { + fpath = filepath.Join(root, filepath.Dir(partial)) + match = true + } + fi := readdir(fpath) + for i := range fi { + name := fi[i].Name() + rel, err := filepath.Rel(root, filepath.Join(fpath, name)) + if err != nil { + panic(err) + } + if match && !has_prefix(rel, partial, ignorecase) { + continue + } else if fi[i].IsDir() { + get_import_candidates_dir(root, 
rel+string(filepath.Separator), ignorecase, currentPackagePath, r) + } else { + ext := filepath.Ext(name) + if ext != ".a" { + continue + } else { + rel = rel[0 : len(rel)-2] + } + if ipath, ok := vendorlessImportPath(filepath.ToSlash(rel), currentPackagePath); ok { + r[ipath] = struct{}{} + } + } + } +} + +// returns three slices of the same length containing: +// 1. apropos names +// 2. apropos types (pretty-printed) +// 3. apropos classes +// and length of the part that should be replaced (if any) +func (c *auto_complete_context) apropos(file []byte, filename string, cursor int) ([]candidate, int) { + c.current.cursor = cursor + c.current.name = filename + + // Update caches and parse the current file. + // This process is quite complicated, because I was trying to design it in a + // concurrent fashion. Apparently I'm not really good at that. Hopefully + // will be better in future. + + // Ugly hack, but it actually may help in some cases. Insert a + // semicolon right at the cursor location. + filesemi := make([]byte, len(file)+1) + copy(filesemi, file[:cursor]) + filesemi[cursor] = ';' + copy(filesemi[cursor+1:], file[cursor:]) + + // Does full processing of the currently edited file (top-level declarations plus + // active function). + c.current.process_data(filesemi) + + // Updates cache of other files and packages. See the function for details of + // the process. At the end merges all the top-level declarations into the package + // block. + c.update_caches() + + // And we're ready to Go. 
;) + + b := new_out_buffers(c) + + partial := 0 + cc, ok := c.deduce_cursor_context(file, cursor) + if !ok { + var d *decl + if ident, ok := cc.expr.(*ast.Ident); ok && g_config.UnimportedPackages { + p := resolveKnownPackageIdent(ident.Name, c.current.name, c.current.context) + c.pcache[p.name] = p + d = p.main + } + if d == nil { + return nil, 0 + } + cc.decl = d + } + + class := decl_invalid + switch cc.partial { + case "const": + class = decl_const + case "var": + class = decl_var + case "type": + class = decl_type + case "func": + class = decl_func + case "package": + class = decl_package + } + + if cc.decl_import { + c.get_import_candidates(cc.partial, b) + if cc.partial != "" && len(b.candidates) == 0 { + // as a fallback, try case insensitive approach + b.ignorecase = true + c.get_import_candidates(cc.partial, b) + } + } else if cc.decl == nil { + // In case if no declaraion is a subject of completion, propose all: + set := c.make_decl_set(c.current.scope) + c.get_candidates_from_set(set, cc.partial, class, b) + if cc.partial != "" && len(b.candidates) == 0 { + // as a fallback, try case insensitive approach + b.ignorecase = true + c.get_candidates_from_set(set, cc.partial, class, b) + } + } else { + c.get_candidates_from_decl(cc, class, b) + if cc.partial != "" && len(b.candidates) == 0 { + // as a fallback, try case insensitive approach + b.ignorecase = true + c.get_candidates_from_decl(cc, class, b) + } + } + partial = len(cc.partial) + + if len(b.candidates) == 0 { + return nil, 0 + } + + sort.Sort(b) + return b.candidates, partial +} + +func update_packages(ps map[string]*package_file_cache) { + // initiate package cache update + done := make(chan bool) + for _, p := range ps { + go func(p *package_file_cache) { + defer func() { + if err := recover(); err != nil { + print_backtrace(err) + done <- false + } + }() + p.update_cache() + done <- true + }(p) + } + + // wait for its completion + for _ = range ps { + if !<-done { + panic("One of the package 
cache updaters panicked") + } + } +} + +func collect_type_alias_methods(d *decl) map[string]*decl { + if d == nil || d.is_visited() || !d.is_alias() { + return nil + } + d.set_visited() + defer d.clear_visited() + + // add own methods + m := map[string]*decl{} + for k, v := range d.children { + m[k] = v + } + + // recurse into more aliases + dd := type_to_decl(d.typ, d.scope) + for k, v := range collect_type_alias_methods(dd) { + m[k] = v + } + + return m +} + +func propagate_type_alias_methods(s *scope) { + for _, e := range s.entities { + if !e.is_alias() { + continue + } + + methods := collect_type_alias_methods(e) + if len(methods) == 0 { + continue + } + + dd := e.type_dealias() + if dd == nil { + continue + } + + decl := dd.deep_copy() + for _, v := range methods { + decl.add_child(v) + } + s.entities[decl.name] = decl + } +} + +func merge_decls(filescope *scope, pkg *scope, decls map[string]*decl) { + for _, d := range decls { + pkg.merge_decl(d) + } + filescope.parent = pkg +} + +func merge_decls_from_packages(pkgscope *scope, pkgs []package_import, pcache package_cache) { + for _, p := range pkgs { + path, alias := p.abspath, p.alias + if alias != "." { + continue + } + p := pcache[path].main + if p == nil { + continue + } + for _, d := range p.children { + if ast.IsExported(d.name) { + pkgscope.merge_decl(d) + } + } + } +} + +func fixup_packages(filescope *scope, pkgs []package_import, pcache package_cache) { + for _, p := range pkgs { + path, alias := p.abspath, p.alias + if alias == "" { + alias = pcache[path].defalias + } + // skip packages that will be merged to the package scope + if alias == "." 
{ + continue + } + filescope.replace_decl(alias, pcache[path].main) + } +} + +func get_other_package_files(filename, packageName string, declcache *decl_cache) []*decl_file_cache { + others := find_other_package_files(filename, packageName) + + ret := make([]*decl_file_cache, len(others)) + done := make(chan *decl_file_cache) + + for _, nm := range others { + go func(name string) { + defer func() { + if err := recover(); err != nil { + print_backtrace(err) + done <- nil + } + }() + done <- declcache.get_and_update(name) + }(nm) + } + + for i := range others { + ret[i] = <-done + if ret[i] == nil { + panic("One of the decl cache updaters panicked") + } + } + + return ret +} + +func find_other_package_files(filename, package_name string) []string { + if filename == "" { + return nil + } + + dir, file := filepath.Split(filename) + files_in_dir, err := readdir_lstat(dir) + if err != nil { + panic(err) + } + + count := 0 + for _, stat := range files_in_dir { + ok, _ := filepath.Match("*.go", stat.Name()) + if !ok || stat.Name() == file { + continue + } + count++ + } + + out := make([]string, 0, count) + for _, stat := range files_in_dir { + const non_regular = os.ModeDir | os.ModeSymlink | + os.ModeDevice | os.ModeNamedPipe | os.ModeSocket + + ok, _ := filepath.Match("*.go", stat.Name()) + if !ok || stat.Name() == file || stat.Mode()&non_regular != 0 { + continue + } + + abspath := filepath.Join(dir, stat.Name()) + if file_package_name(abspath) == package_name { + n := len(out) + out = out[:n+1] + out[n] = abspath + } + } + + return out +} + +func file_package_name(filename string) string { + file, _ := parser.ParseFile(token.NewFileSet(), filename, nil, parser.PackageClauseOnly) + return file.Name.Name +} + +func make_decl_set_recursive(set map[string]*decl, scope *scope) { + for name, ent := range scope.entities { + if _, ok := set[name]; !ok { + set[name] = ent + } + } + if scope.parent != nil { + make_decl_set_recursive(set, scope.parent) + } +} + +func 
check_func_field_list(f *ast.FieldList) bool { + if f == nil { + return true + } + + for _, field := range f.List { + if !check_type_expr(field.Type) { + return false + } + } + return true +} + +// checks for a type expression correctness, it the type expression has +// ast.BadExpr somewhere, returns false, otherwise true +func check_type_expr(e ast.Expr) bool { + switch t := e.(type) { + case *ast.StarExpr: + return check_type_expr(t.X) + case *ast.ArrayType: + return check_type_expr(t.Elt) + case *ast.SelectorExpr: + return check_type_expr(t.X) + case *ast.FuncType: + a := check_func_field_list(t.Params) + b := check_func_field_list(t.Results) + return a && b + case *ast.MapType: + a := check_type_expr(t.Key) + b := check_type_expr(t.Value) + return a && b + case *ast.Ellipsis: + return check_type_expr(t.Elt) + case *ast.ChanType: + return check_type_expr(t.Value) + case *ast.BadExpr: + return false + default: + return true + } + return true +} + +//------------------------------------------------------------------------- +// Status output +//------------------------------------------------------------------------- + +type decl_slice []*decl + +func (s decl_slice) Less(i, j int) bool { + if s[i].class != s[j].class { + return s[i].name < s[j].name + } + return s[i].class < s[j].class +} +func (s decl_slice) Len() int { return len(s) } +func (s decl_slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +const ( + color_red = "\033[0;31m" + color_red_bold = "\033[1;31m" + color_green = "\033[0;32m" + color_green_bold = "\033[1;32m" + color_yellow = "\033[0;33m" + color_yellow_bold = "\033[1;33m" + color_blue = "\033[0;34m" + color_blue_bold = "\033[1;34m" + color_magenta = "\033[0;35m" + color_magenta_bold = "\033[1;35m" + color_cyan = "\033[0;36m" + color_cyan_bold = "\033[1;36m" + color_white = "\033[0;37m" + color_white_bold = "\033[1;37m" + color_none = "\033[0m" +) + +var g_decl_class_to_color = [...]string{ + decl_const: color_white_bold, + decl_var: 
color_magenta, + decl_type: color_cyan, + decl_func: color_green, + decl_package: color_red, + decl_methods_stub: color_red, +} + +var g_decl_class_to_string_status = [...]string{ + decl_const: " const", + decl_var: " var", + decl_type: " type", + decl_func: " func", + decl_package: "package", + decl_methods_stub: " stub", +} + +func (c *auto_complete_context) status() string { + + buf := bytes.NewBuffer(make([]byte, 0, 4096)) + fmt.Fprintf(buf, "Server's GOMAXPROCS == %d\n", runtime.GOMAXPROCS(0)) + fmt.Fprintf(buf, "\nPackage cache contains %d entries\n", len(c.pcache)) + fmt.Fprintf(buf, "\nListing these entries:\n") + for _, mod := range c.pcache { + fmt.Fprintf(buf, "\tname: %s (default alias: %s)\n", mod.name, mod.defalias) + fmt.Fprintf(buf, "\timports %d declarations and %d packages\n", len(mod.main.children), len(mod.others)) + if mod.mtime == -1 { + fmt.Fprintf(buf, "\tthis package stays in cache forever (built-in package)\n") + } else { + mtime := time.Unix(0, mod.mtime) + fmt.Fprintf(buf, "\tlast modification time: %s\n", mtime) + } + fmt.Fprintf(buf, "\n") + } + if c.current.name != "" { + fmt.Fprintf(buf, "Last edited file: %s (package: %s)\n", c.current.name, c.current.package_name) + if len(c.others) > 0 { + fmt.Fprintf(buf, "\nOther files from the current package:\n") + } + for _, f := range c.others { + fmt.Fprintf(buf, "\t%s\n", f.name) + } + fmt.Fprintf(buf, "\nListing declarations from files:\n") + + const status_decls = "\t%s%s" + color_none + " " + color_yellow + "%s" + color_none + "\n" + const status_decls_children = "\t%s%s" + color_none + " " + color_yellow + "%s" + color_none + " (%d)\n" + + fmt.Fprintf(buf, "\n%s:\n", c.current.name) + ds := make(decl_slice, len(c.current.decls)) + i := 0 + for _, d := range c.current.decls { + ds[i] = d + i++ + } + sort.Sort(ds) + for _, d := range ds { + if len(d.children) > 0 { + fmt.Fprintf(buf, status_decls_children, + g_decl_class_to_color[d.class], + g_decl_class_to_string_status[d.class], + 
d.name, len(d.children)) + } else { + fmt.Fprintf(buf, status_decls, + g_decl_class_to_color[d.class], + g_decl_class_to_string_status[d.class], + d.name) + } + } + + for _, f := range c.others { + fmt.Fprintf(buf, "\n%s:\n", f.name) + ds = make(decl_slice, len(f.decls)) + i = 0 + for _, d := range f.decls { + ds[i] = d + i++ + } + sort.Sort(ds) + for _, d := range ds { + if len(d.children) > 0 { + fmt.Fprintf(buf, status_decls_children, + g_decl_class_to_color[d.class], + g_decl_class_to_string_status[d.class], + d.name, len(d.children)) + } else { + fmt.Fprintf(buf, status_decls, + g_decl_class_to_color[d.class], + g_decl_class_to_string_status[d.class], + d.name) + } + } + } + } + return buf.String() +} diff --git a/langserver/internal/gocode/autocompletefile.go b/langserver/internal/gocode/autocompletefile.go new file mode 100644 index 00000000..6d47dcdb --- /dev/null +++ b/langserver/internal/gocode/autocompletefile.go @@ -0,0 +1,420 @@ +package gocode + +import ( + "bytes" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "log" +) + +func parse_decl_list(fset *token.FileSet, data []byte) ([]ast.Decl, error) { + var buf bytes.Buffer + buf.WriteString("package p;") + buf.Write(data) + file, err := parser.ParseFile(fset, "", buf.Bytes(), parser.AllErrors) + if err != nil { + return file.Decls, err + } + return file.Decls, nil +} + +func log_parse_error(intro string, err error) { + if el, ok := err.(scanner.ErrorList); ok { + log.Printf("%s:", intro) + for _, er := range el { + log.Printf(" %s", er) + } + } else { + log.Printf("%s: %s", intro, err) + } +} + +//------------------------------------------------------------------------- +// auto_complete_file +//------------------------------------------------------------------------- + +type auto_complete_file struct { + name string + package_name string + + decls map[string]*decl + packages []package_import + filescope *scope + scope *scope + + cursor int // for current file buffer only + fset *token.FileSet + 
context *package_lookup_context +} + +func new_auto_complete_file(name string, context *package_lookup_context) *auto_complete_file { + p := new(auto_complete_file) + p.name = name + p.cursor = -1 + p.fset = token.NewFileSet() + p.context = context + return p +} + +func (f *auto_complete_file) offset(p token.Pos) int { + const fixlen = len("package p;") + return f.fset.Position(p).Offset - fixlen +} + +// this one is used for current file buffer exclusively +func (f *auto_complete_file) process_data(data []byte) { + cur, filedata, block := rip_off_decl(data, f.cursor) + file, err := parser.ParseFile(f.fset, "", filedata, parser.AllErrors) + if err != nil && *g_debug { + log_parse_error("Error parsing input file (outer block)", err) + } + f.package_name = package_name(file) + + f.decls = make(map[string]*decl) + f.packages = collect_package_imports(f.name, file.Decls, f.context) + f.filescope = new_scope(nil) + f.scope = f.filescope + + for _, d := range file.Decls { + anonymify_ast(d, 0, f.filescope) + } + + // process all top-level declarations + for _, decl := range file.Decls { + append_to_top_decls(f.decls, decl, f.scope) + } + if block != nil { + // process local function as top-level declaration + decls, err := parse_decl_list(f.fset, block) + if err != nil && *g_debug { + log_parse_error("Error parsing input file (inner block)", err) + } + + for _, d := range decls { + anonymify_ast(d, 0, f.filescope) + } + + for _, decl := range decls { + append_to_top_decls(f.decls, decl, f.scope) + } + + // process function internals + f.cursor = cur + for _, decl := range decls { + f.process_decl_locals(decl) + } + } + +} + +func (f *auto_complete_file) process_decl_locals(decl ast.Decl) { + switch t := decl.(type) { + case *ast.FuncDecl: + if f.cursor_in(t.Body) { + s := f.scope + f.scope = new_scope(f.scope) + + f.process_field_list(t.Recv, s) + f.process_field_list(t.Type.Params, s) + f.process_field_list(t.Type.Results, s) + f.process_block_stmt(t.Body) + } + 
default: + v := new(func_lit_visitor) + v.ctx = f + ast.Walk(v, decl) + } +} + +func (f *auto_complete_file) process_decl(decl ast.Decl) { + if t, ok := decl.(*ast.GenDecl); ok && f.offset(t.TokPos) > f.cursor { + return + } + prevscope := f.scope + foreach_decl(decl, func(data *foreach_decl_struct) { + class := ast_decl_class(data.decl) + if class != decl_type { + f.scope, prevscope = advance_scope(f.scope) + } + for i, name := range data.names { + typ, v, vi := data.type_value_index(i) + + d := new_decl_full(name.Name, class, ast_decl_flags(data.decl), typ, v, vi, prevscope) + if d == nil { + return + } + + f.scope.add_named_decl(d) + } + }) +} + +func (f *auto_complete_file) process_block_stmt(block *ast.BlockStmt) { + if block != nil && f.cursor_in(block) { + f.scope, _ = advance_scope(f.scope) + + for _, stmt := range block.List { + f.process_stmt(stmt) + } + + // hack to process all func literals + v := new(func_lit_visitor) + v.ctx = f + ast.Walk(v, block) + } +} + +type func_lit_visitor struct { + ctx *auto_complete_file +} + +func (v *func_lit_visitor) Visit(node ast.Node) ast.Visitor { + if t, ok := node.(*ast.FuncLit); ok && v.ctx.cursor_in(t.Body) { + s := v.ctx.scope + v.ctx.scope = new_scope(v.ctx.scope) + + v.ctx.process_field_list(t.Type.Params, s) + v.ctx.process_field_list(t.Type.Results, s) + v.ctx.process_block_stmt(t.Body) + + return nil + } + return v +} + +func (f *auto_complete_file) process_stmt(stmt ast.Stmt) { + switch t := stmt.(type) { + case *ast.DeclStmt: + f.process_decl(t.Decl) + case *ast.AssignStmt: + f.process_assign_stmt(t) + case *ast.IfStmt: + if f.cursor_in_if_head(t) { + f.process_stmt(t.Init) + } else if f.cursor_in_if_stmt(t) { + f.scope, _ = advance_scope(f.scope) + f.process_stmt(t.Init) + f.process_block_stmt(t.Body) + f.process_stmt(t.Else) + } + case *ast.BlockStmt: + f.process_block_stmt(t) + case *ast.RangeStmt: + f.process_range_stmt(t) + case *ast.ForStmt: + if f.cursor_in_for_head(t) { + f.process_stmt(t.Init) + 
} else if f.cursor_in(t.Body) { + f.scope, _ = advance_scope(f.scope) + + f.process_stmt(t.Init) + f.process_block_stmt(t.Body) + } + case *ast.SwitchStmt: + f.process_switch_stmt(t) + case *ast.TypeSwitchStmt: + f.process_type_switch_stmt(t) + case *ast.SelectStmt: + f.process_select_stmt(t) + case *ast.LabeledStmt: + f.process_stmt(t.Stmt) + } +} + +func (f *auto_complete_file) process_select_stmt(a *ast.SelectStmt) { + if !f.cursor_in(a.Body) { + return + } + var prevscope *scope + f.scope, prevscope = advance_scope(f.scope) + + var last_cursor_after *ast.CommClause + for _, s := range a.Body.List { + if cc := s.(*ast.CommClause); f.cursor > f.offset(cc.Colon) { + last_cursor_after = cc + } + } + + if last_cursor_after != nil { + if last_cursor_after.Comm != nil { + //if lastCursorAfter.Lhs != nil && lastCursorAfter.Tok == token.DEFINE { + if astmt, ok := last_cursor_after.Comm.(*ast.AssignStmt); ok && astmt.Tok == token.DEFINE { + vname := astmt.Lhs[0].(*ast.Ident).Name + v := new_decl_var(vname, nil, astmt.Rhs[0], -1, prevscope) + if v != nil { + f.scope.add_named_decl(v) + } + } + } + for _, s := range last_cursor_after.Body { + f.process_stmt(s) + } + } +} + +func (f *auto_complete_file) process_type_switch_stmt(a *ast.TypeSwitchStmt) { + if !f.cursor_in(a.Body) { + return + } + var prevscope *scope + f.scope, prevscope = advance_scope(f.scope) + + f.process_stmt(a.Init) + // type var + var tv *decl + if a, ok := a.Assign.(*ast.AssignStmt); ok { + lhs := a.Lhs + rhs := a.Rhs + if lhs != nil && len(lhs) == 1 { + tvname := lhs[0].(*ast.Ident).Name + tv = new_decl_var(tvname, nil, rhs[0], -1, prevscope) + } + } + + var last_cursor_after *ast.CaseClause + for _, s := range a.Body.List { + if cc := s.(*ast.CaseClause); f.cursor > f.offset(cc.Colon) { + last_cursor_after = cc + } + } + + if last_cursor_after != nil { + if tv != nil { + if last_cursor_after.List != nil && len(last_cursor_after.List) == 1 { + tv.typ = last_cursor_after.List[0] + tv.value = nil + } + 
f.scope.add_named_decl(tv) + } + for _, s := range last_cursor_after.Body { + f.process_stmt(s) + } + } +} + +func (f *auto_complete_file) process_switch_stmt(a *ast.SwitchStmt) { + if !f.cursor_in(a.Body) { + return + } + f.scope, _ = advance_scope(f.scope) + + f.process_stmt(a.Init) + var last_cursor_after *ast.CaseClause + for _, s := range a.Body.List { + if cc := s.(*ast.CaseClause); f.cursor > f.offset(cc.Colon) { + last_cursor_after = cc + } + } + if last_cursor_after != nil { + for _, s := range last_cursor_after.Body { + f.process_stmt(s) + } + } +} + +func (f *auto_complete_file) process_range_stmt(a *ast.RangeStmt) { + if !f.cursor_in(a.Body) { + return + } + var prevscope *scope + f.scope, prevscope = advance_scope(f.scope) + + if a.Tok == token.DEFINE { + if t, ok := a.Key.(*ast.Ident); ok { + d := new_decl_var(t.Name, nil, a.X, 0, prevscope) + if d != nil { + d.flags |= decl_rangevar + f.scope.add_named_decl(d) + } + } + + if a.Value != nil { + if t, ok := a.Value.(*ast.Ident); ok { + d := new_decl_var(t.Name, nil, a.X, 1, prevscope) + if d != nil { + d.flags |= decl_rangevar + f.scope.add_named_decl(d) + } + } + } + } + + f.process_block_stmt(a.Body) +} + +func (f *auto_complete_file) process_assign_stmt(a *ast.AssignStmt) { + if a.Tok != token.DEFINE || f.offset(a.TokPos) > f.cursor { + return + } + + names := make([]*ast.Ident, len(a.Lhs)) + for i, name := range a.Lhs { + id, ok := name.(*ast.Ident) + if !ok { + // something is wrong, just ignore the whole stmt + return + } + names[i] = id + } + + var prevscope *scope + f.scope, prevscope = advance_scope(f.scope) + + pack := decl_pack{names, nil, a.Rhs} + for i, name := range pack.names { + typ, v, vi := pack.type_value_index(i) + d := new_decl_var(name.Name, typ, v, vi, prevscope) + if d == nil { + continue + } + + f.scope.add_named_decl(d) + } +} + +func (f *auto_complete_file) process_field_list(field_list *ast.FieldList, s *scope) { + if field_list != nil { + decls := 
ast_field_list_to_decls(field_list, decl_var, 0, s, false) + for _, d := range decls { + f.scope.add_named_decl(d) + } + } +} + +func (f *auto_complete_file) cursor_in_if_head(s *ast.IfStmt) bool { + if f.cursor > f.offset(s.If) && f.cursor <= f.offset(s.Body.Lbrace) { + return true + } + return false +} + +func (f *auto_complete_file) cursor_in_if_stmt(s *ast.IfStmt) bool { + if f.cursor > f.offset(s.If) { + // magic -10 comes from auto_complete_file.offset method, see + // len() expr in there + if f.offset(s.End()) == -10 || f.cursor < f.offset(s.End()) { + return true + } + } + return false +} + +func (f *auto_complete_file) cursor_in_for_head(s *ast.ForStmt) bool { + if f.cursor > f.offset(s.For) && f.cursor <= f.offset(s.Body.Lbrace) { + return true + } + return false +} + +func (f *auto_complete_file) cursor_in(block *ast.BlockStmt) bool { + if f.cursor == -1 || block == nil { + return false + } + + if f.cursor > f.offset(block.Lbrace) && f.cursor <= f.offset(block.Rbrace) { + return true + } + return false +} diff --git a/langserver/internal/gocode/config.go b/langserver/internal/gocode/config.go new file mode 100644 index 00000000..8bc0ddaf --- /dev/null +++ b/langserver/internal/gocode/config.go @@ -0,0 +1,177 @@ +package gocode + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "os" + "reflect" + "strconv" +) + +//------------------------------------------------------------------------- +// config +// +// Structure represents persistent config storage of the gocode daemon. Usually +// the config is located somewhere in ~/.config/gocode directory. 
+//------------------------------------------------------------------------- + +type config struct { + ProposeBuiltins bool `json:"propose-builtins"` + LibPath string `json:"lib-path"` + CustomPkgPrefix string `json:"custom-pkg-prefix"` + CustomVendorDir string `json:"custom-vendor-dir"` + Autobuild bool `json:"autobuild"` + ForceDebugOutput string `json:"force-debug-output"` + PackageLookupMode string `json:"package-lookup-mode"` + CloseTimeout int `json:"close-timeout"` + UnimportedPackages bool `json:"unimported-packages"` +} + +var g_config = config{ + ProposeBuiltins: false, + LibPath: "", + CustomPkgPrefix: "", + Autobuild: false, + ForceDebugOutput: "", + PackageLookupMode: "go", + CloseTimeout: 1800, + UnimportedPackages: false, +} + +var g_string_to_bool = map[string]bool{ + "t": true, + "true": true, + "y": true, + "yes": true, + "on": true, + "1": true, + "f": false, + "false": false, + "n": false, + "no": false, + "off": false, + "0": false, +} + +func set_value(v reflect.Value, value string) { + switch t := v; t.Kind() { + case reflect.Bool: + v, ok := g_string_to_bool[value] + if ok { + t.SetBool(v) + } + case reflect.String: + t.SetString(value) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + v, err := strconv.ParseInt(value, 10, 64) + if err == nil { + t.SetInt(v) + } + case reflect.Float32, reflect.Float64: + v, err := strconv.ParseFloat(value, 64) + if err == nil { + t.SetFloat(v) + } + } +} + +func list_value(v reflect.Value, name string, w io.Writer) { + switch t := v; t.Kind() { + case reflect.Bool: + fmt.Fprintf(w, "%s %v\n", name, t.Bool()) + case reflect.String: + fmt.Fprintf(w, "%s \"%v\"\n", name, t.String()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + fmt.Fprintf(w, "%s %v\n", name, t.Int()) + case reflect.Float32, reflect.Float64: + fmt.Fprintf(w, "%s %v\n", name, t.Float()) + } +} + +func (this *config) list() string { + str, typ := this.value_and_type() + buf := 
bytes.NewBuffer(make([]byte, 0, 256)) + for i := 0; i < str.NumField(); i++ { + v := str.Field(i) + name := typ.Field(i).Tag.Get("json") + list_value(v, name, buf) + } + return buf.String() +} + +func (this *config) list_option(name string) string { + str, typ := this.value_and_type() + buf := bytes.NewBuffer(make([]byte, 0, 256)) + for i := 0; i < str.NumField(); i++ { + v := str.Field(i) + nm := typ.Field(i).Tag.Get("json") + if nm == name { + list_value(v, name, buf) + } + } + return buf.String() +} + +func (this *config) set_option(name, value string) string { + str, typ := this.value_and_type() + buf := bytes.NewBuffer(make([]byte, 0, 256)) + for i := 0; i < str.NumField(); i++ { + v := str.Field(i) + nm := typ.Field(i).Tag.Get("json") + if nm == name { + set_value(v, value) + list_value(v, name, buf) + } + } + this.write() + return buf.String() + +} + +func (this *config) value_and_type() (reflect.Value, reflect.Type) { + v := reflect.ValueOf(this).Elem() + return v, v.Type() +} + +func (this *config) write() error { + data, err := json.Marshal(this) + if err != nil { + return err + } + + // make sure config dir exists + dir := config_dir() + if !file_exists(dir) { + os.MkdirAll(dir, 0755) + } + + f, err := os.Create(config_file()) + if err != nil { + return err + } + defer f.Close() + + _, err = f.Write(data) + if err != nil { + return err + } + + return nil +} + +func (this *config) read() error { + data, err := ioutil.ReadFile(config_file()) + if err != nil { + return err + } + + err = json.Unmarshal(data, this) + if err != nil { + return err + } + + return nil +} diff --git a/langserver/internal/gocode/cursorcontext.go b/langserver/internal/gocode/cursorcontext.go new file mode 100644 index 00000000..76f610a9 --- /dev/null +++ b/langserver/internal/gocode/cursorcontext.go @@ -0,0 +1,584 @@ +package gocode + +import ( + "bytes" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "log" +) + +type cursor_context struct { + decl *decl + partial string + 
struct_field bool + decl_import bool + + // store expression that was supposed to be deduced to "decl", however + // if decl is nil, then deduction failed, we could try to resolve it to + // unimported package instead + expr ast.Expr +} + +type token_iterator struct { + tokens []token_item + token_index int +} + +type token_item struct { + off int + tok token.Token + lit string +} + +func (i token_item) literal() string { + if i.tok.IsLiteral() { + return i.lit + } else { + return i.tok.String() + } + return "" +} + +func new_token_iterator(src []byte, cursor int) token_iterator { + tokens := make([]token_item, 0, 1000) + var s scanner.Scanner + fset := token.NewFileSet() + file := fset.AddFile("", fset.Base(), len(src)) + s.Init(file, src, nil, 0) + for { + pos, tok, lit := s.Scan() + off := fset.Position(pos).Offset + if tok == token.EOF || cursor <= off { + break + } + tokens = append(tokens, token_item{ + off: off, + tok: tok, + lit: lit, + }) + } + return token_iterator{ + tokens: tokens, + token_index: len(tokens) - 1, + } +} + +func (this *token_iterator) token() token_item { + return this.tokens[this.token_index] +} + +func (this *token_iterator) go_back() bool { + if this.token_index <= 0 { + return false + } + this.token_index-- + return true +} + +var bracket_pairs_map = map[token.Token]token.Token{ + token.RPAREN: token.LPAREN, + token.RBRACK: token.LBRACK, + token.RBRACE: token.LBRACE, +} + +func (ti *token_iterator) skip_to_left(left, right token.Token) bool { + if ti.token().tok == left { + return true + } + balance := 1 + for balance != 0 { + if !ti.go_back() { + return false + } + switch ti.token().tok { + case right: + balance++ + case left: + balance-- + } + } + return true +} + +// when the cursor is at the ')' or ']' or '}', move the cursor to an opposite +// bracket pair, this functions takes nested bracket pairs into account +func (this *token_iterator) skip_to_balanced_pair() bool { + right := this.token().tok + left := 
bracket_pairs_map[right] + return this.skip_to_left(left, right) +} + +// Move the cursor to the open brace of the current block, taking nested blocks +// into account. +func (this *token_iterator) skip_to_left_curly() bool { + return this.skip_to_left(token.LBRACE, token.RBRACE) +} + +func (ti *token_iterator) extract_type_alike() string { + if ti.token().tok != token.IDENT { // not Foo, return nothing + return "" + } + b := ti.token().literal() + if !ti.go_back() { // just Foo + return b + } + if ti.token().tok != token.PERIOD { // not .Foo, return Foo + return b + } + if !ti.go_back() { // just .Foo, return Foo (best choice recovery) + return b + } + if ti.token().tok != token.IDENT { // not lib.Foo, return Foo + return b + } + out := ti.token().literal() + "." + b // lib.Foo + ti.go_back() + return out +} + +// Extract the type expression right before the enclosing curly bracket block. +// Examples (# - the cursor): +// &lib.Struct{Whatever: 1, Hel#} // returns "lib.Struct" +// X{#} // returns X +// The idea is that we check if this type expression is a type and it is, we +// can apply special filtering for autocompletion results. +// Sadly, this doesn't cover anonymous structs. +func (ti *token_iterator) extract_struct_type() string { + if !ti.skip_to_left_curly() { + return "" + } + if !ti.go_back() { + return "" + } + if ti.token().tok == token.LBRACE { // Foo{#{}} + if !ti.go_back() { + return "" + } + } else if ti.token().tok == token.COMMA { // Foo{abc,#{}} + return ti.extract_struct_type() + } + typ := ti.extract_type_alike() + if typ == "" { + return "" + } + if ti.token().tok == token.RPAREN || ti.token().tok == token.MUL { + return "" + } + return typ +} + +// Starting from the token under the cursor move back and extract something +// that resembles a valid Go primary expression. 
Examples of primary expressions +// from Go spec: +// x +// 2 +// (s + ".txt") +// f(3.1415, true) +// Point{1, 2} +// m["foo"] +// s[i : j + 1] +// obj.color +// f.p[i].x() +// +// As you can see we can move through all of them using balanced bracket +// matching and applying simple rules +// E.g. +// Point{1, 2}.m["foo"].s[i : j + 1].MethodCall(a, func(a, b int) int { return a + b }). +// Can be seen as: +// Point{ }.m[ ].s[ ].MethodCall( ). +// Which boils the rules down to these connected via dots: +// ident +// ident[] +// ident{} +// ident() +// Of course there are also slightly more complicated rules for brackets: +// ident{}.ident()[5][4](), etc. +func (this *token_iterator) extract_go_expr() string { + orig := this.token_index + + // Contains the type of the previously scanned token (initialized with + // the token right under the cursor). This is the token to the *right* of + // the current one. + prev := this.token().tok +loop: + for { + if !this.go_back() { + return token_items_to_string(this.tokens[:orig]) + } + switch this.token().tok { + case token.PERIOD: + // If the '.' is not followed by IDENT, it's invalid. + if prev != token.IDENT { + break loop + } + case token.IDENT: + // Valid tokens after IDENT are '.', '[', '{' and '('. + switch prev { + case token.PERIOD, token.LBRACK, token.LBRACE, token.LPAREN: + // all ok + default: + break loop + } + case token.RBRACE: + // This one can only be a part of type initialization, like: + // Dummy{}.Hello() + // It is valid Go if Hello method is defined on a non-pointer receiver. + if prev != token.PERIOD { + break loop + } + this.skip_to_balanced_pair() + case token.RPAREN, token.RBRACK: + // After ']' and ')' their opening counterparts are valid '[', '(', + // as well as the dot. 
+ switch prev { + case token.PERIOD, token.LBRACK, token.LPAREN: + // all ok + default: + break loop + } + this.skip_to_balanced_pair() + default: + break loop + } + prev = this.token().tok + } + expr := token_items_to_string(this.tokens[this.token_index+1 : orig]) + if *g_debug { + log.Printf("extracted expression tokens: %s", expr) + } + return expr +} + +// Given a slice of token_item, reassembles them into the original literal +// expression. +func token_items_to_string(tokens []token_item) string { + var buf bytes.Buffer + for _, t := range tokens { + buf.WriteString(t.literal()) + } + return buf.String() +} + +// this function is called when the cursor is at the '.' and you need to get the +// declaration before that dot +func (c *auto_complete_context) deduce_cursor_decl(iter *token_iterator) (*decl, ast.Expr) { + expr, err := parser.ParseExpr(iter.extract_go_expr()) + if err != nil { + return nil, nil + } + return expr_to_decl(expr, c.current.scope), expr +} + +// try to find and extract the surrounding struct literal type +func (c *auto_complete_context) deduce_struct_type_decl(iter *token_iterator) *decl { + typ := iter.extract_struct_type() + if typ == "" { + return nil + } + + expr, err := parser.ParseExpr(typ) + if err != nil { + return nil + } + decl := type_to_decl(expr, c.current.scope) + if decl == nil { + return nil + } + + // we allow only struct types here, but also support type aliases + if decl.is_alias() { + dd := decl.type_dealias() + if _, ok := dd.typ.(*ast.StructType); !ok { + return nil + } + } else if _, ok := decl.typ.(*ast.StructType); !ok { + return nil + } + return decl +} + +// Entry point from autocompletion, the function looks at text before the cursor +// and figures out the declaration the cursor is on. This declaration is +// used in filtering the resulting set of autocompletion suggestions. 
+func (c *auto_complete_context) deduce_cursor_context(file []byte, cursor int) (cursor_context, bool) { + if cursor <= 0 { + return cursor_context{}, true + } + + iter := new_token_iterator(file, cursor) + if len(iter.tokens) == 0 { + return cursor_context{}, false + } + + // figure out what is just before the cursor + switch tok := iter.token(); tok.tok { + case token.STRING: + // make sure cursor is inside the string + s := tok.literal() + if len(s) > 1 && s[len(s)-1] == '"' && tok.off+len(s) <= cursor { + return cursor_context{}, true + } + // now figure out if inside an import declaration + var ptok = token.STRING + for iter.go_back() { + itok := iter.token().tok + switch itok { + case token.STRING: + switch ptok { + case token.SEMICOLON, token.IDENT, token.PERIOD: + default: + return cursor_context{}, true + } + case token.LPAREN, token.SEMICOLON: + switch ptok { + case token.STRING, token.IDENT, token.PERIOD: + default: + return cursor_context{}, true + } + case token.IDENT, token.PERIOD: + switch ptok { + case token.STRING: + default: + return cursor_context{}, true + } + case token.IMPORT: + switch ptok { + case token.STRING, token.IDENT, token.PERIOD, token.LPAREN: + path_len := cursor - tok.off + path := s[1:path_len] + return cursor_context{decl_import: true, partial: path}, true + default: + return cursor_context{}, true + } + default: + return cursor_context{}, true + } + ptok = itok + } + case token.PERIOD: + // we're '.' + // figure out decl, Partial is "" + decl, expr := c.deduce_cursor_decl(&iter) + return cursor_context{decl: decl, expr: expr}, decl != nil + case token.IDENT, token.TYPE, token.CONST, token.VAR, token.FUNC, token.PACKAGE: + // we're '.' + // parse as Partial and figure out decl + var partial string + if tok.tok == token.IDENT { + // Calculate the offset of the cursor position within the identifier. + // For instance, if we are 'ab#c', we want partial_len = 2 and partial = ab. 
+ partial_len := cursor - tok.off + + // If it happens that the cursor is past the end of the literal, + // means there is a space between the literal and the cursor, think + // of it as no context, because that's what it really is. + if partial_len > len(tok.literal()) { + return cursor_context{}, true + } + partial = tok.literal()[0:partial_len] + } else { + // Do not try to truncate if it is not an identifier. + partial = tok.literal() + } + + iter.go_back() + switch iter.token().tok { + case token.PERIOD: + decl, expr := c.deduce_cursor_decl(&iter) + return cursor_context{decl: decl, partial: partial, expr: expr}, decl != nil + case token.COMMA, token.LBRACE: + // This can happen for struct fields: + // &Struct{Hello: 1, Wor#} // (# - the cursor) + // Let's try to find the struct type + decl := c.deduce_struct_type_decl(&iter) + return cursor_context{ + decl: decl, + partial: partial, + struct_field: decl != nil, + }, true + default: + return cursor_context{partial: partial}, true + } + case token.COMMA, token.LBRACE: + // Try to parse the current expression as a structure initialization. + decl := c.deduce_struct_type_decl(&iter) + return cursor_context{ + decl: decl, + partial: "", + struct_field: decl != nil, + }, true + } + + return cursor_context{}, true +} + +// Decl deduction failed, but we're on ".", this ident can be an +// unexported package, let's try to match the ident against a set of known +// packages and if it matches try to import it. +// TODO: Right now I've made a static list of built-in packages, but in theory +// we could scan all GOPATH packages as well. Now, don't forget that default +// package name has nothing to do with package file name, that's why we need to +// scan the packages. And many of them will have conflicts. Can we make a smart +// prediction algorithm which will prefer certain packages over another ones? 
+func resolveKnownPackageIdent(ident string, filename string, context *package_lookup_context) *package_file_cache { + importPath, ok := knownPackageIdents[ident] + if !ok { + return nil + } + + path, ok := abs_path_for_package(filename, importPath, context) + if !ok { + return nil + } + + p := new_package_file_cache(path, importPath) + p.update_cache() + return p +} + +var knownPackageIdents = map[string]string{ + "adler32": "hash/adler32", + "aes": "crypto/aes", + "ascii85": "encoding/ascii85", + "asn1": "encoding/asn1", + "ast": "go/ast", + "atomic": "sync/atomic", + "base32": "encoding/base32", + "base64": "encoding/base64", + "big": "math/big", + "binary": "encoding/binary", + "bufio": "bufio", + "build": "go/build", + "bytes": "bytes", + "bzip2": "compress/bzip2", + "cgi": "net/http/cgi", + "cgo": "runtime/cgo", + "cipher": "crypto/cipher", + "cmplx": "math/cmplx", + "color": "image/color", + "constant": "go/constant", + "context": "context", + "cookiejar": "net/http/cookiejar", + "crc32": "hash/crc32", + "crc64": "hash/crc64", + "crypto": "crypto", + "csv": "encoding/csv", + "debug": "runtime/debug", + "des": "crypto/des", + "doc": "go/doc", + "draw": "image/draw", + "driver": "database/sql/driver", + "dsa": "crypto/dsa", + "dwarf": "debug/dwarf", + "ecdsa": "crypto/ecdsa", + "elf": "debug/elf", + "elliptic": "crypto/elliptic", + "encoding": "encoding", + "errors": "errors", + "exec": "os/exec", + "expvar": "expvar", + "fcgi": "net/http/fcgi", + "filepath": "path/filepath", + "flag": "flag", + "flate": "compress/flate", + "fmt": "fmt", + "fnv": "hash/fnv", + "format": "go/format", + "gif": "image/gif", + "gob": "encoding/gob", + "gosym": "debug/gosym", + "gzip": "compress/gzip", + "hash": "hash", + "heap": "container/heap", + "hex": "encoding/hex", + "hmac": "crypto/hmac", + "hpack": "vendor/golang_org/x/net/http2/hpack", + "html": "html", + "http": "net/http", + "httplex": "vendor/golang_org/x/net/lex/httplex", + "httptest": "net/http/httptest", + 
"httptrace": "net/http/httptrace", + "httputil": "net/http/httputil", + "image": "image", + "importer": "go/importer", + "io": "io", + "iotest": "testing/iotest", + "ioutil": "io/ioutil", + "jpeg": "image/jpeg", + "json": "encoding/json", + "jsonrpc": "net/rpc/jsonrpc", + "list": "container/list", + "log": "log", + "lzw": "compress/lzw", + "macho": "debug/macho", + "mail": "net/mail", + "math": "math", + "md5": "crypto/md5", + "mime": "mime", + "multipart": "mime/multipart", + "net": "net", + "os": "os", + "palette": "image/color/palette", + "parse": "text/template/parse", + "parser": "go/parser", + "path": "path", + "pe": "debug/pe", + "pem": "encoding/pem", + "pkix": "crypto/x509/pkix", + "plan9obj": "debug/plan9obj", + "png": "image/png", + "pprof": "net/http/pprof", + "printer": "go/printer", + "quick": "testing/quick", + "quotedprintable": "mime/quotedprintable", + "race": "runtime/race", + "rand": "math/rand", + "rc4": "crypto/rc4", + "reflect": "reflect", + "regexp": "regexp", + "ring": "container/ring", + "rpc": "net/rpc", + "rsa": "crypto/rsa", + "runtime": "runtime", + "scanner": "text/scanner", + "sha1": "crypto/sha1", + "sha256": "crypto/sha256", + "sha512": "crypto/sha512", + "signal": "os/signal", + "smtp": "net/smtp", + "sort": "sort", + "sql": "database/sql", + "strconv": "strconv", + "strings": "strings", + "subtle": "crypto/subtle", + "suffixarray": "index/suffixarray", + "sync": "sync", + "syntax": "regexp/syntax", + "syscall": "syscall", + "syslog": "log/syslog", + "tabwriter": "text/tabwriter", + "tar": "archive/tar", + "template": "html/template", + "testing": "testing", + "textproto": "net/textproto", + "time": "time", + "tls": "crypto/tls", + "token": "go/token", + "trace": "runtime/trace", + "types": "go/types", + "unicode": "unicode", + "url": "net/url", + "user": "os/user", + "utf16": "unicode/utf16", + "utf8": "unicode/utf8", + "x509": "crypto/x509", + "xml": "encoding/xml", + "zip": "archive/zip", + "zlib": "compress/zlib", + 
//"scanner": "go/scanner", // DUP: prefer text/scanner + //"template": "text/template", // DUP: prefer html/template + //"pprof": "runtime/pprof", // DUP: prefer net/http/pprof + //"rand": "crypto/rand", // DUP: prefer math/rand +} diff --git a/langserver/internal/gocode/decl.go b/langserver/internal/gocode/decl.go new file mode 100644 index 00000000..988f0627 --- /dev/null +++ b/langserver/internal/gocode/decl.go @@ -0,0 +1,1473 @@ +package gocode + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "io" + "reflect" + "strings" + "sync" +) + +// decl.class +type decl_class int16 + +const ( + decl_invalid = decl_class(-1 + iota) + + // these are in a sorted order + decl_const + decl_func + decl_import + decl_package + decl_type + decl_var + + // this one serves as a temporary type for those methods that were + // declared before their actual owner + decl_methods_stub +) + +func (this decl_class) String() string { + switch this { + case decl_invalid: + return "PANIC" + case decl_const: + return "const" + case decl_func: + return "func" + case decl_import: + return "import" + case decl_package: + return "package" + case decl_type: + return "type" + case decl_var: + return "var" + case decl_methods_stub: + return "IF YOU SEE THIS, REPORT A BUG" // :D + } + panic("unreachable") +} + +// decl.flags +type decl_flags int16 + +const ( + decl_foreign decl_flags = 1 << iota // imported from another package + + // means that the decl is a part of the range statement + // its type is inferred in a special way + decl_rangevar + + // decl of decl_type class is a type alias + decl_alias + + // for preventing infinite recursions and loops in type inference code + decl_visited +) + +//------------------------------------------------------------------------- +// decl +// +// The most important data structure of the whole gocode project. It +// describes a single declaration and its children. 
+//------------------------------------------------------------------------- + +type decl struct { + // Name starts with '$' if the declaration describes an anonymous type. + // '$s_%d' for anonymous struct types + // '$i_%d' for anonymous interface types + name string + typ ast.Expr + class decl_class + flags decl_flags + + // functions for interface type, fields+methods for struct type + children map[string]*decl + + // embedded types + embedded []ast.Expr + + // if the type is unknown at AST building time, I'm using these + value ast.Expr + + // if it's a multiassignment and the Value is a CallExpr, it is being set + // to an index into the return value tuple, otherwise it's a -1 + value_index int + + // scope where this Decl was declared in (not its visibilty scope!) + // Decl uses it for type inference + scope *scope +} + +func ast_decl_type(d ast.Decl) ast.Expr { + switch t := d.(type) { + case *ast.GenDecl: + switch t.Tok { + case token.CONST, token.VAR: + c := t.Specs[0].(*ast.ValueSpec) + return c.Type + case token.TYPE: + t := t.Specs[0].(*ast.TypeSpec) + return t.Type + } + case *ast.FuncDecl: + return t.Type + } + panic("unreachable") + return nil +} + +func ast_decl_flags(d ast.Decl) decl_flags { + switch t := d.(type) { + case *ast.GenDecl: + switch t.Tok { + case token.TYPE: + if isAliasTypeSpec(t.Specs[0].(*ast.TypeSpec)) { + return decl_alias + } + } + } + return 0 +} + +func ast_decl_class(d ast.Decl) decl_class { + switch t := d.(type) { + case *ast.GenDecl: + switch t.Tok { + case token.VAR: + return decl_var + case token.CONST: + return decl_const + case token.TYPE: + return decl_type + } + case *ast.FuncDecl: + return decl_func + } + panic("unreachable") +} + +func ast_decl_convertable(d ast.Decl) bool { + switch t := d.(type) { + case *ast.GenDecl: + switch t.Tok { + case token.VAR, token.CONST, token.TYPE: + return true + } + case *ast.FuncDecl: + return true + } + return false +} + +func ast_field_list_to_decls(f *ast.FieldList, class 
decl_class, flags decl_flags, scope *scope, add_anonymous bool) map[string]*decl { + count := 0 + for _, field := range f.List { + count += len(field.Names) + } + + decls := make(map[string]*decl, count) + for _, field := range f.List { + for _, name := range field.Names { + if flags&decl_foreign != 0 && !ast.IsExported(name.Name) { + continue + } + d := &decl{ + name: name.Name, + typ: field.Type, + class: class, + flags: flags, + scope: scope, + value_index: -1, + } + decls[d.name] = d + } + + // add anonymous field as a child (type embedding) + if class == decl_var && field.Names == nil && add_anonymous { + tp := get_type_path(field.Type) + if flags&decl_foreign != 0 && !ast.IsExported(tp.name) { + continue + } + d := &decl{ + name: tp.name, + typ: field.Type, + class: class, + flags: flags, + scope: scope, + value_index: -1, + } + decls[d.name] = d + } + } + return decls +} + +func ast_field_list_to_embedded(f *ast.FieldList) []ast.Expr { + count := 0 + for _, field := range f.List { + if field.Names == nil || field.Names[0].Name == "?" { + count++ + } + } + + if count == 0 { + return nil + } + + embedded := make([]ast.Expr, count) + i := 0 + for _, field := range f.List { + if field.Names == nil || field.Names[0].Name == "?" 
{ + embedded[i] = field.Type + i++ + } + } + + return embedded +} + +func ast_type_to_embedded(ty ast.Expr) []ast.Expr { + switch t := ty.(type) { + case *ast.StructType: + return ast_field_list_to_embedded(t.Fields) + case *ast.InterfaceType: + return ast_field_list_to_embedded(t.Methods) + } + return nil +} + +func ast_type_to_children(ty ast.Expr, flags decl_flags, scope *scope) map[string]*decl { + switch t := ty.(type) { + case *ast.StructType: + return ast_field_list_to_decls(t.Fields, decl_var, flags, scope, true) + case *ast.InterfaceType: + return ast_field_list_to_decls(t.Methods, decl_func, flags, scope, false) + } + return nil +} + +//------------------------------------------------------------------------- +// anonymous_id_gen +// +// ID generator for anonymous types (thread-safe) +//------------------------------------------------------------------------- + +type anonymous_id_gen struct { + sync.Mutex + i int +} + +func (a *anonymous_id_gen) gen() (id int) { + a.Lock() + defer a.Unlock() + id = a.i + a.i++ + return +} + +var g_anon_gen anonymous_id_gen + +//------------------------------------------------------------------------- + +func check_for_anon_type(t ast.Expr, flags decl_flags, s *scope) ast.Expr { + if t == nil { + return nil + } + var name string + + switch t.(type) { + case *ast.StructType: + name = fmt.Sprintf("$s_%d", g_anon_gen.gen()) + case *ast.InterfaceType: + name = fmt.Sprintf("$i_%d", g_anon_gen.gen()) + } + + if name != "" { + anonymify_ast(t, flags, s) + d := new_decl_full(name, decl_type, flags, t, nil, -1, s) + s.add_named_decl(d) + return ast.NewIdent(name) + } + return t +} + +//------------------------------------------------------------------------- + +func new_decl_full(name string, class decl_class, flags decl_flags, typ, v ast.Expr, vi int, s *scope) *decl { + if name == "_" { + return nil + } + d := new(decl) + d.name = name + d.class = class + d.flags = flags + d.typ = typ + d.value = v + d.value_index = vi + d.scope 
= s + d.children = ast_type_to_children(d.typ, flags, s) + d.embedded = ast_type_to_embedded(d.typ) + return d +} + +func new_decl(name string, class decl_class, scope *scope) *decl { + decl := new(decl) + decl.name = name + decl.class = class + decl.value_index = -1 + decl.scope = scope + return decl +} + +func new_decl_var(name string, typ ast.Expr, value ast.Expr, vindex int, scope *scope) *decl { + if name == "_" { + return nil + } + decl := new(decl) + decl.name = name + decl.class = decl_var + decl.typ = typ + decl.value = value + decl.value_index = vindex + decl.scope = scope + return decl +} + +func method_of(d ast.Decl) string { + if t, ok := d.(*ast.FuncDecl); ok { + if t.Recv != nil && len(t.Recv.List) != 0 { + switch t := t.Recv.List[0].Type.(type) { + case *ast.StarExpr: + if se, ok := t.X.(*ast.SelectorExpr); ok { + return se.Sel.Name + } + if ident, ok := t.X.(*ast.Ident); ok { + return ident.Name + } + return "" + case *ast.Ident: + return t.Name + default: + return "" + } + } + } + return "" +} + +func (other *decl) deep_copy() *decl { + d := new(decl) + d.name = other.name + d.class = other.class + d.flags = other.flags + d.typ = other.typ + d.value = other.value + d.value_index = other.value_index + d.children = make(map[string]*decl, len(other.children)) + for key, value := range other.children { + d.children[key] = value + } + if other.embedded != nil { + d.embedded = make([]ast.Expr, len(other.embedded)) + copy(d.embedded, other.embedded) + } + d.scope = other.scope + return d +} + +func (d *decl) is_rangevar() bool { + return d.flags&decl_rangevar != 0 +} + +func (d *decl) is_alias() bool { + return d.flags&decl_alias != 0 +} + +func (d *decl) is_visited() bool { + return d.flags&decl_visited != 0 +} + +func (d *decl) set_visited() { + d.flags |= decl_visited +} + +func (d *decl) clear_visited() { + d.flags &^= decl_visited +} + +func (d *decl) expand_or_replace(other *decl) { + // expand only if it's a methods stub, otherwise simply keep it 
as is + if d.class != decl_methods_stub && other.class != decl_methods_stub { + return + } + + if d.class == decl_methods_stub { + d.typ = other.typ + d.class = other.class + d.flags = other.flags + } + + if other.children != nil { + for _, c := range other.children { + d.add_child(c) + } + } + + if other.embedded != nil { + d.embedded = other.embedded + d.scope = other.scope + } +} + +func (d *decl) matches() bool { + if strings.HasPrefix(d.name, "$") || d.class == decl_methods_stub { + return false + } + return true +} + +func (d *decl) pretty_print_type(out io.Writer, canonical_aliases map[string]string) { + switch d.class { + case decl_type: + switch d.typ.(type) { + case *ast.StructType: + // TODO: not used due to anonymify? + fmt.Fprintf(out, "struct") + case *ast.InterfaceType: + // TODO: not used due to anonymify? + fmt.Fprintf(out, "interface") + default: + if d.typ != nil { + pretty_print_type_expr(out, d.typ, canonical_aliases) + } + } + case decl_var: + if d.typ != nil { + pretty_print_type_expr(out, d.typ, canonical_aliases) + } + case decl_func: + pretty_print_type_expr(out, d.typ, canonical_aliases) + } +} + +func (d *decl) add_child(cd *decl) { + if d.children == nil { + d.children = make(map[string]*decl) + } + d.children[cd.name] = cd +} + +func check_for_builtin_funcs(typ *ast.Ident, c *ast.CallExpr, scope *scope) (ast.Expr, *scope) { + if strings.HasPrefix(typ.Name, "func(") { + if t, ok := c.Fun.(*ast.Ident); ok { + switch t.Name { + case "new": + if len(c.Args) > 0 { + e := new(ast.StarExpr) + e.X = c.Args[0] + return e, scope + } + case "make": + if len(c.Args) > 0 { + return c.Args[0], scope + } + case "append": + if len(c.Args) > 0 { + t, scope, _ := infer_type(c.Args[0], scope, -1) + return t, scope + } + case "complex": + // TODO: fix it + return ast.NewIdent("complex"), g_universe_scope + case "closed": + return ast.NewIdent("bool"), g_universe_scope + case "cap": + return ast.NewIdent("int"), g_universe_scope + case "copy": + return 
ast.NewIdent("int"), g_universe_scope + case "len": + return ast.NewIdent("int"), g_universe_scope + } + // TODO: + // func recover() interface{} + // func imag(c ComplexType) FloatType + // func real(c ComplexType) FloatType + } + } + return nil, nil +} + +func func_return_type(f *ast.FuncType, index int) ast.Expr { + if f.Results == nil { + return nil + } + + if index == -1 { + return f.Results.List[0].Type + } + + i := 0 + var field *ast.Field + for _, field = range f.Results.List { + n := 1 + if field.Names != nil { + n = len(field.Names) + } + if i <= index && index < i+n { + return field.Type + } + i += n + } + return nil +} + +type type_path struct { + pkg string + name string +} + +func (tp *type_path) is_nil() bool { + return tp.pkg == "" && tp.name == "" +} + +// converts type expressions like: +// ast.Expr +// *ast.Expr +// $ast$go/ast.Expr +// to a path that can be used to lookup a type related Decl +func get_type_path(e ast.Expr) (r type_path) { + if e == nil { + return type_path{"", ""} + } + + switch t := e.(type) { + case *ast.Ident: + r.name = t.Name + case *ast.StarExpr: + r = get_type_path(t.X) + case *ast.SelectorExpr: + if ident, ok := t.X.(*ast.Ident); ok { + r.pkg = ident.Name + } + r.name = t.Sel.Name + } + return +} + +func lookup_path(tp type_path, scope *scope) *decl { + if tp.is_nil() { + return nil + } + var decl *decl + if tp.pkg != "" { + decl = scope.lookup(tp.pkg) + // return nil early if the package wasn't found but it's part + // of the type specification + if decl == nil { + return nil + } + } + + if decl != nil { + if tp.name != "" { + return decl.find_child(tp.name) + } else { + return decl + } + } + + return scope.lookup(tp.name) +} + +func lookup_pkg(tp type_path, scope *scope) string { + if tp.is_nil() { + return "" + } + if tp.pkg == "" { + return "" + } + decl := scope.lookup(tp.pkg) + if decl == nil { + return "" + } + return decl.name +} + +func type_to_decl(t ast.Expr, scope *scope) *decl { + tp := get_type_path(t) + d 
:= lookup_path(tp, scope) + if d != nil && d.class == decl_var { + // weird variable declaration pointing to itself + return nil + } + return d +} + +func expr_to_decl(e ast.Expr, scope *scope) *decl { + t, scope, _ := infer_type(e, scope, -1) + return type_to_decl(t, scope) +} + +//------------------------------------------------------------------------- +// Type inference +//------------------------------------------------------------------------- + +type type_predicate func(ast.Expr) bool + +func advance_to_type(pred type_predicate, v ast.Expr, scope *scope) (ast.Expr, *scope) { + if pred(v) { + return v, scope + } + + decl := type_to_decl(v, scope) + if decl == nil { + return nil, nil + } + + if decl.is_visited() { + return nil, nil + } + decl.set_visited() + defer decl.clear_visited() + + return advance_to_type(pred, decl.typ, decl.scope) +} + +func advance_to_struct_or_interface(decl *decl) *decl { + if decl.is_visited() { + return nil + } + decl.set_visited() + defer decl.clear_visited() + + if struct_interface_predicate(decl.typ) { + return decl + } + + decl = type_to_decl(decl.typ, decl.scope) + if decl == nil { + return nil + } + return advance_to_struct_or_interface(decl) +} + +func struct_interface_predicate(v ast.Expr) bool { + switch v.(type) { + case *ast.StructType, *ast.InterfaceType: + return true + } + return false +} + +func chan_predicate(v ast.Expr) bool { + _, ok := v.(*ast.ChanType) + return ok +} + +func index_predicate(v ast.Expr) bool { + switch v.(type) { + case *ast.ArrayType, *ast.MapType, *ast.Ellipsis: + return true + } + return false +} + +func star_predicate(v ast.Expr) bool { + _, ok := v.(*ast.StarExpr) + return ok +} + +func func_predicate(v ast.Expr) bool { + _, ok := v.(*ast.FuncType) + return ok +} + +func range_predicate(v ast.Expr) bool { + switch t := v.(type) { + case *ast.Ident: + if t.Name == "string" { + return true + } + case *ast.ArrayType, *ast.MapType, *ast.ChanType, *ast.Ellipsis: + return true + } + return false 
+} + +type anonymous_typer struct { + flags decl_flags + scope *scope +} + +func (a *anonymous_typer) Visit(node ast.Node) ast.Visitor { + switch t := node.(type) { + case *ast.CompositeLit: + t.Type = check_for_anon_type(t.Type, a.flags, a.scope) + case *ast.MapType: + t.Key = check_for_anon_type(t.Key, a.flags, a.scope) + t.Value = check_for_anon_type(t.Value, a.flags, a.scope) + case *ast.ArrayType: + t.Elt = check_for_anon_type(t.Elt, a.flags, a.scope) + case *ast.Ellipsis: + t.Elt = check_for_anon_type(t.Elt, a.flags, a.scope) + case *ast.ChanType: + t.Value = check_for_anon_type(t.Value, a.flags, a.scope) + case *ast.Field: + t.Type = check_for_anon_type(t.Type, a.flags, a.scope) + case *ast.CallExpr: + t.Fun = check_for_anon_type(t.Fun, a.flags, a.scope) + case *ast.ParenExpr: + t.X = check_for_anon_type(t.X, a.flags, a.scope) + case *ast.StarExpr: + t.X = check_for_anon_type(t.X, a.flags, a.scope) + case *ast.GenDecl: + switch t.Tok { + case token.VAR: + for _, s := range t.Specs { + vs := s.(*ast.ValueSpec) + vs.Type = check_for_anon_type(vs.Type, a.flags, a.scope) + } + } + } + return a +} + +func anonymify_ast(node ast.Node, flags decl_flags, scope *scope) { + v := anonymous_typer{flags, scope} + ast.Walk(&v, node) +} + +// RETURNS: +// - type expression which represents a full name of a type +// - bool whether a type expression is actually a type (used internally) +// - scope in which type makes sense +func infer_type(v ast.Expr, scope *scope, index int) (ast.Expr, *scope, bool) { + switch t := v.(type) { + case *ast.CompositeLit: + return t.Type, scope, true + case *ast.Ident: + if d := scope.lookup(t.Name); d != nil { + if d.class == decl_package { + return ast.NewIdent(t.Name), scope, false + } + typ, scope := d.infer_type() + return typ, scope, d.class == decl_type + } + case *ast.UnaryExpr: + switch t.Op { + case token.AND: + // &a makes sense only with values, don't even check for type + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + 
break + } + + e := new(ast.StarExpr) + e.X = it + return e, s, false + case token.ARROW: + // <-a makes sense only with values + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + break + } + switch index { + case -1, 0: + it, s = advance_to_type(chan_predicate, it, s) + return it.(*ast.ChanType).Value, s, false + case 1: + // technically it's a value, but in case of index == 1 + // it is always the last infer operation + return ast.NewIdent("bool"), g_universe_scope, false + } + case token.ADD, token.NOT, token.SUB, token.XOR: + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + break + } + return it, s, false + } + case *ast.BinaryExpr: + switch t.Op { + case token.EQL, token.NEQ, token.LSS, token.LEQ, + token.GTR, token.GEQ, token.LOR, token.LAND: + // logic operations, the result is a bool, always + return ast.NewIdent("bool"), g_universe_scope, false + case token.ADD, token.SUB, token.MUL, token.QUO, token.OR, + token.XOR, token.REM, token.AND, token.AND_NOT: + // try X, then Y, they should be the same anyway + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + it, s, _ = infer_type(t.Y, scope, -1) + if it == nil { + break + } + } + return it, s, false + case token.SHL, token.SHR: + // try only X for shifts, Y is always uint + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + break + } + return it, s, false + } + case *ast.IndexExpr: + // something[another] always returns a value and it works on a value too + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + break + } + it, s = advance_to_type(index_predicate, it, s) + switch t := it.(type) { + case *ast.ArrayType: + return t.Elt, s, false + case *ast.Ellipsis: + return t.Elt, s, false + case *ast.MapType: + switch index { + case -1, 0: + return t.Value, s, false + case 1: + return ast.NewIdent("bool"), g_universe_scope, false + } + } + case *ast.SliceExpr: + // something[start : end] always returns a value + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + break + 
} + it, s = advance_to_type(index_predicate, it, s) + switch t := it.(type) { + case *ast.ArrayType: + e := new(ast.ArrayType) + e.Elt = t.Elt + return e, s, false + } + case *ast.StarExpr: + it, s, is_type := infer_type(t.X, scope, -1) + if it == nil { + break + } + if is_type { + // if it's a type, add * modifier, make it a 'pointer of' type + e := new(ast.StarExpr) + e.X = it + return e, s, true + } else { + it, s := advance_to_type(star_predicate, it, s) + if se, ok := it.(*ast.StarExpr); ok { + return se.X, s, false + } + } + case *ast.CallExpr: + // this is a function call or a type cast: + // myFunc(1,2,3) or int16(myvar) + it, s, is_type := infer_type(t.Fun, scope, -1) + if it == nil { + break + } + + if is_type { + // a type cast + return it, scope, false + } else { + // it must be a function call or a built-in function + // first check for built-in + if ct, ok := it.(*ast.Ident); ok { + ty, s := check_for_builtin_funcs(ct, t, scope) + if ty != nil { + return ty, s, false + } + } + + // then check for an ordinary function call + it, scope = advance_to_type(func_predicate, it, s) + if ct, ok := it.(*ast.FuncType); ok { + return func_return_type(ct, index), s, false + } + } + case *ast.ParenExpr: + it, s, is_type := infer_type(t.X, scope, -1) + if it == nil { + break + } + return it, s, is_type + case *ast.SelectorExpr: + it, s, _ := infer_type(t.X, scope, -1) + if it == nil { + break + } + + if d := type_to_decl(it, s); d != nil { + c := d.find_child_and_in_embedded(t.Sel.Name) + if c != nil { + if c.class == decl_type { + return t, scope, true + } else { + typ, s := c.infer_type() + return typ, s, false + } + } + } + case *ast.FuncLit: + // it's a value, but I think most likely we don't even care, cause we can only + // call it, and CallExpr uses the type itself to figure out + return t.Type, scope, false + case *ast.TypeAssertExpr: + if t.Type == nil { + return infer_type(t.X, scope, -1) + } + switch index { + case -1, 0: + // converting a value to a 
different type, but return thing is a value + it, _, _ := infer_type(t.Type, scope, -1) + return it, scope, false + case 1: + return ast.NewIdent("bool"), g_universe_scope, false + } + case *ast.ArrayType, *ast.MapType, *ast.ChanType, *ast.Ellipsis, + *ast.FuncType, *ast.StructType, *ast.InterfaceType: + return t, scope, true + default: + _ = reflect.TypeOf(v) + //fmt.Println(ty) + } + return nil, nil, false +} + +// Uses Value, ValueIndex and Scope to infer the type of this +// declaration. Returns the type itself and the scope where this type +// makes sense. +func (d *decl) infer_type() (ast.Expr, *scope) { + // special case for range vars + if d.is_rangevar() { + var scope *scope + d.typ, scope = infer_range_type(d.value, d.scope, d.value_index) + return d.typ, scope + } + + switch d.class { + case decl_package: + // package is handled specially in inferType + return nil, nil + case decl_type: + return ast.NewIdent(d.name), d.scope + } + + // shortcut + if d.typ != nil && d.value == nil { + return d.typ, d.scope + } + + // prevent loops + if d.is_visited() { + return nil, nil + } + d.set_visited() + defer d.clear_visited() + + var scope *scope + d.typ, scope, _ = infer_type(d.value, d.scope, d.value_index) + return d.typ, scope +} + +func (d *decl) type_dealias() *decl { + if d.is_visited() { + return nil + } + d.set_visited() + defer d.clear_visited() + + dd := type_to_decl(d.typ, d.scope) + if dd != nil && dd.is_alias() { + return dd.type_dealias() + } + return dd +} + +func (d *decl) find_child(name string) *decl { + // type aliases don't really have any children on their own, but they + // point to a different type, let's try to find one + if d.is_alias() { + dd := d.type_dealias() + if dd != nil { + return dd.find_child(name) + } + + // note that type alias can also point to a type literal, something like + // type A = struct { A int } + // in this case we rely on "advance_to_struct_or_interface" below + } + + if d.children != nil { + if c, ok := 
d.children[name]; ok { + return c + } + } + + decl := advance_to_struct_or_interface(d) + if decl != nil && decl != d { + if d.is_visited() { + return nil + } + d.set_visited() + defer d.clear_visited() + + return decl.find_child(name) + } + return nil +} + +func (d *decl) find_child_and_in_embedded(name string) *decl { + if d == nil { + return nil + } + + c := d.find_child(name) + if c == nil { + for _, e := range d.embedded { + typedecl := type_to_decl(e, d.scope) + c = typedecl.find_child_and_in_embedded(name) + if c != nil { + break + } + } + } + return c +} + +// Special type inference for range statements. +// [int], [int] := range [string] +// [int], [value] := range [slice or array] +// [key], [value] := range [map] +// [value], [nil] := range [chan] +func infer_range_type(e ast.Expr, sc *scope, valueindex int) (ast.Expr, *scope) { + t, s, _ := infer_type(e, sc, -1) + t, s = advance_to_type(range_predicate, t, s) + if t != nil { + var t1, t2 ast.Expr + var s1, s2 *scope + s1 = s + s2 = s + + switch t := t.(type) { + case *ast.Ident: + // string + if t.Name == "string" { + t1 = ast.NewIdent("int") + t2 = ast.NewIdent("rune") + s1 = g_universe_scope + s2 = g_universe_scope + } else { + t1, t2 = nil, nil + } + case *ast.ArrayType: + t1 = ast.NewIdent("int") + s1 = g_universe_scope + t2 = t.Elt + case *ast.Ellipsis: + t1 = ast.NewIdent("int") + s1 = g_universe_scope + t2 = t.Elt + case *ast.MapType: + t1 = t.Key + t2 = t.Value + case *ast.ChanType: + t1 = t.Value + t2 = nil + default: + t1, t2 = nil, nil + } + + switch valueindex { + case 0: + return t1, s1 + case 1: + return t2, s2 + } + } + return nil, nil +} + +//------------------------------------------------------------------------- +// Pretty printing +//------------------------------------------------------------------------- + +func get_array_len(e ast.Expr) string { + switch t := e.(type) { + case *ast.BasicLit: + return string(t.Value) + case *ast.Ellipsis: + return "..." 
+ } + return "" +} + +func pretty_print_type_expr(out io.Writer, e ast.Expr, canonical_aliases map[string]string) { + switch t := e.(type) { + case *ast.StarExpr: + fmt.Fprintf(out, "*") + pretty_print_type_expr(out, t.X, canonical_aliases) + case *ast.Ident: + if strings.HasPrefix(t.Name, "$") { + // beautify anonymous types + switch t.Name[1] { + case 's': + fmt.Fprintf(out, "struct") + case 'i': + // ok, in most cases anonymous interface is an + // empty interface, I'll just pretend that + // it's always true + fmt.Fprintf(out, "interface{}") + } + } else if !*g_debug && strings.HasPrefix(t.Name, "!") { + // these are full package names for disambiguating and pretty + // printing packages within packages, e.g. + // !go/ast!ast vs. !github.com/nsf/my/ast!ast + // another ugly hack, if people are punished in hell for ugly hacks + // I'm screwed... + emarkIdx := strings.LastIndex(t.Name, "!") + path := t.Name[1:emarkIdx] + alias := canonical_aliases[path] + if alias == "" { + alias = t.Name[emarkIdx+1:] + } + fmt.Fprintf(out, alias) + } else { + fmt.Fprintf(out, t.Name) + } + case *ast.ArrayType: + al := "" + if t.Len != nil { + al = get_array_len(t.Len) + } + if al != "" { + fmt.Fprintf(out, "[%s]", al) + } else { + fmt.Fprintf(out, "[]") + } + pretty_print_type_expr(out, t.Elt, canonical_aliases) + case *ast.SelectorExpr: + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, ".%s", t.Sel.Name) + case *ast.FuncType: + fmt.Fprintf(out, "func(") + pretty_print_func_field_list(out, t.Params, canonical_aliases) + fmt.Fprintf(out, ")") + + buf := bytes.NewBuffer(make([]byte, 0, 256)) + nresults := pretty_print_func_field_list(buf, t.Results, canonical_aliases) + if nresults > 0 { + results := buf.String() + if strings.IndexAny(results, ", ") != -1 { + results = "(" + results + ")" + } + fmt.Fprintf(out, " %s", results) + } + case *ast.MapType: + fmt.Fprintf(out, "map[") + pretty_print_type_expr(out, t.Key, canonical_aliases) + fmt.Fprintf(out, "]") + 
pretty_print_type_expr(out, t.Value, canonical_aliases) + case *ast.InterfaceType: + fmt.Fprintf(out, "interface{}") + case *ast.Ellipsis: + fmt.Fprintf(out, "...") + pretty_print_type_expr(out, t.Elt, canonical_aliases) + case *ast.StructType: + fmt.Fprintf(out, "struct") + case *ast.ChanType: + switch t.Dir { + case ast.RECV: + fmt.Fprintf(out, "<-chan ") + case ast.SEND: + fmt.Fprintf(out, "chan<- ") + case ast.SEND | ast.RECV: + fmt.Fprintf(out, "chan ") + } + pretty_print_type_expr(out, t.Value, canonical_aliases) + case *ast.ParenExpr: + fmt.Fprintf(out, "(") + pretty_print_type_expr(out, t.X, canonical_aliases) + fmt.Fprintf(out, ")") + case *ast.BadExpr: + // TODO: probably I should check that in a separate function + // and simply discard declarations with BadExpr as a part of their + // type + default: + // the element has some weird type, just ignore it + } +} + +func pretty_print_func_field_list(out io.Writer, f *ast.FieldList, canonical_aliases map[string]string) int { + count := 0 + if f == nil { + return count + } + for i, field := range f.List { + // names + if field.Names != nil { + hasNonblank := false + for j, name := range field.Names { + if name.Name != "?" 
{ + hasNonblank = true + fmt.Fprintf(out, "%s", name.Name) + if j != len(field.Names)-1 { + fmt.Fprintf(out, ", ") + } + } + count++ + } + if hasNonblank { + fmt.Fprintf(out, " ") + } + } else { + count++ + } + + // type + pretty_print_type_expr(out, field.Type, canonical_aliases) + + // , + if i != len(f.List)-1 { + fmt.Fprintf(out, ", ") + } + } + return count +} + +func ast_decl_names(d ast.Decl) []*ast.Ident { + var names []*ast.Ident + + switch t := d.(type) { + case *ast.GenDecl: + switch t.Tok { + case token.CONST: + c := t.Specs[0].(*ast.ValueSpec) + names = make([]*ast.Ident, len(c.Names)) + for i, name := range c.Names { + names[i] = name + } + case token.TYPE: + t := t.Specs[0].(*ast.TypeSpec) + names = make([]*ast.Ident, 1) + names[0] = t.Name + case token.VAR: + v := t.Specs[0].(*ast.ValueSpec) + names = make([]*ast.Ident, len(v.Names)) + for i, name := range v.Names { + names[i] = name + } + } + case *ast.FuncDecl: + names = make([]*ast.Ident, 1) + names[0] = t.Name + } + + return names +} + +func ast_decl_values(d ast.Decl) []ast.Expr { + // TODO: CONST values here too + switch t := d.(type) { + case *ast.GenDecl: + switch t.Tok { + case token.VAR: + v := t.Specs[0].(*ast.ValueSpec) + if v.Values != nil { + return v.Values + } + } + } + return nil +} + +func ast_decl_split(d ast.Decl) []ast.Decl { + var decls []ast.Decl + if t, ok := d.(*ast.GenDecl); ok { + decls = make([]ast.Decl, len(t.Specs)) + for i, s := range t.Specs { + decl := new(ast.GenDecl) + *decl = *t + decl.Specs = make([]ast.Spec, 1) + decl.Specs[0] = s + decls[i] = decl + } + } else { + decls = make([]ast.Decl, 1) + decls[0] = d + } + return decls +} + +//------------------------------------------------------------------------- +// decl_pack +//------------------------------------------------------------------------- + +type decl_pack struct { + names []*ast.Ident + typ ast.Expr + values []ast.Expr +} + +type foreach_decl_struct struct { + decl_pack + decl ast.Decl +} + +func (f 
*decl_pack) value(i int) ast.Expr { + if f.values == nil { + return nil + } + if len(f.values) > 1 { + return f.values[i] + } + return f.values[0] +} + +func (f *decl_pack) value_index(i int) (v ast.Expr, vi int) { + // default: nil value + v = nil + vi = -1 + + if f.values != nil { + // A = B, if there is only one name, the value is solo too + if len(f.names) == 1 { + return f.values[0], -1 + } + + if len(f.values) > 1 { + // in case if there are multiple values, it's a usual + // multiassignment + if i >= len(f.values) { + i = len(f.values) - 1 + } + v = f.values[i] + } else { + // in case if there is one value, but many names, it's + // a tuple unpack.. use index here + v = f.values[0] + vi = i + } + } + return +} + +func (f *decl_pack) type_value_index(i int) (ast.Expr, ast.Expr, int) { + if f.typ != nil { + // If there is a type, we don't care about value, just return the type + // and zero value. + return f.typ, nil, -1 + } + + // And otherwise we simply return nil type and a valid value for later inferring. 
+ v, vi := f.value_index(i) + return nil, v, vi +} + +type foreach_decl_func func(data *foreach_decl_struct) + +func foreach_decl(decl ast.Decl, do foreach_decl_func) { + decls := ast_decl_split(decl) + var data foreach_decl_struct + for _, decl := range decls { + if !ast_decl_convertable(decl) { + continue + } + data.names = ast_decl_names(decl) + data.typ = ast_decl_type(decl) + data.values = ast_decl_values(decl) + data.decl = decl + + do(&data) + } +} + +//------------------------------------------------------------------------- +// Built-in declarations +//------------------------------------------------------------------------- + +var g_universe_scope = new_scope(nil) + +func init() { + builtin := ast.NewIdent("built-in") + + add_type := func(name string) { + d := new_decl(name, decl_type, g_universe_scope) + d.typ = builtin + g_universe_scope.add_named_decl(d) + } + add_type("bool") + add_type("byte") + add_type("complex64") + add_type("complex128") + add_type("float32") + add_type("float64") + add_type("int8") + add_type("int16") + add_type("int32") + add_type("int64") + add_type("string") + add_type("uint8") + add_type("uint16") + add_type("uint32") + add_type("uint64") + add_type("int") + add_type("uint") + add_type("uintptr") + add_type("rune") + + add_const := func(name string) { + d := new_decl(name, decl_const, g_universe_scope) + d.typ = builtin + g_universe_scope.add_named_decl(d) + } + add_const("true") + add_const("false") + add_const("iota") + add_const("nil") + + add_func := func(name, typ string) { + d := new_decl(name, decl_func, g_universe_scope) + d.typ = ast.NewIdent(typ) + g_universe_scope.add_named_decl(d) + } + add_func("append", "func([]type, ...type) []type") + add_func("cap", "func(container) int") + add_func("close", "func(channel)") + add_func("complex", "func(real, imag) complex") + add_func("copy", "func(dst, src)") + add_func("delete", "func(map[typeA]typeB, typeA)") + add_func("imag", "func(complex)") + add_func("len", 
"func(container) int") + add_func("make", "func(type, len[, cap]) type") + add_func("new", "func(type) *type") + add_func("panic", "func(interface{})") + add_func("print", "func(...interface{})") + add_func("println", "func(...interface{})") + add_func("real", "func(complex)") + add_func("recover", "func() interface{}") + + // built-in error interface + d := new_decl("error", decl_type, g_universe_scope) + d.typ = &ast.InterfaceType{} + d.children = make(map[string]*decl) + d.children["Error"] = new_decl("Error", decl_func, g_universe_scope) + d.children["Error"].typ = &ast.FuncType{ + Results: &ast.FieldList{ + List: []*ast.Field{ + { + Type: ast.NewIdent("string"), + }, + }, + }, + } + g_universe_scope.add_named_decl(d) +} diff --git a/langserver/internal/gocode/declcache.go b/langserver/internal/gocode/declcache.go new file mode 100644 index 00000000..9841b731 --- /dev/null +++ b/langserver/internal/gocode/declcache.go @@ -0,0 +1,532 @@ +package gocode + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "log" + "os" + "os/exec" + "path/filepath" + "strings" + "sync" +) + +//------------------------------------------------------------------------- +// []package_import +//------------------------------------------------------------------------- + +type package_import struct { + alias string + abspath string + path string +} + +// Parses import declarations until the first non-import declaration and fills +// `packages` array with import information. 
+func collect_package_imports(filename string, decls []ast.Decl, context *package_lookup_context) []package_import { + pi := make([]package_import, 0, 16) + for _, decl := range decls { + if gd, ok := decl.(*ast.GenDecl); ok && gd.Tok == token.IMPORT { + for _, spec := range gd.Specs { + imp := spec.(*ast.ImportSpec) + path, alias := path_and_alias(imp) + abspath, ok := abs_path_for_package(filename, path, context) + if ok && alias != "_" { + pi = append(pi, package_import{alias, abspath, path}) + } + } + } else { + break + } + } + return pi +} + +//------------------------------------------------------------------------- +// decl_file_cache +// +// Contains cache for top-level declarations of a file as well as its +// contents, AST and import information. +//------------------------------------------------------------------------- + +type decl_file_cache struct { + name string // file name + mtime int64 // last modification time + + decls map[string]*decl // top-level declarations + error error // last error + packages []package_import // import information + filescope *scope + + fset *token.FileSet + context *package_lookup_context +} + +func new_decl_file_cache(name string, context *package_lookup_context) *decl_file_cache { + return &decl_file_cache{ + name: name, + context: context, + } +} + +func (f *decl_file_cache) update() { + stat, err := os.Stat(f.name) + if err != nil { + f.decls = nil + f.error = err + f.fset = nil + return + } + + statmtime := stat.ModTime().UnixNano() + if f.mtime == statmtime { + return + } + + f.mtime = statmtime + f.read_file() +} + +func (f *decl_file_cache) read_file() { + var data []byte + data, f.error = file_reader.read_file(f.name) + if f.error != nil { + return + } + data, _ = filter_out_shebang(data) + + f.process_data(data) +} + +func (f *decl_file_cache) process_data(data []byte) { + var file *ast.File + f.fset = token.NewFileSet() + file, f.error = parser.ParseFile(f.fset, "", data, 0) + f.filescope = new_scope(nil) + 
for _, d := range file.Decls { + anonymify_ast(d, 0, f.filescope) + } + f.packages = collect_package_imports(f.name, file.Decls, f.context) + f.decls = make(map[string]*decl, len(file.Decls)) + for _, decl := range file.Decls { + append_to_top_decls(f.decls, decl, f.filescope) + } +} + +func append_to_top_decls(decls map[string]*decl, decl ast.Decl, scope *scope) { + foreach_decl(decl, func(data *foreach_decl_struct) { + class := ast_decl_class(data.decl) + for i, name := range data.names { + typ, v, vi := data.type_value_index(i) + + d := new_decl_full(name.Name, class, ast_decl_flags(data.decl), typ, v, vi, scope) + if d == nil { + return + } + + methodof := method_of(decl) + if methodof != "" { + decl, ok := decls[methodof] + if ok { + decl.add_child(d) + } else { + decl = new_decl(methodof, decl_methods_stub, scope) + decls[methodof] = decl + decl.add_child(d) + } + } else { + decl, ok := decls[d.name] + if ok { + decl.expand_or_replace(d) + } else { + decls[d.name] = d + } + } + } + }) +} + +func abs_path_for_package(filename, p string, context *package_lookup_context) (string, bool) { + dir, _ := filepath.Split(filename) + if len(p) == 0 { + return "", false + } + if p[0] == '.' 
{ + return fmt.Sprintf("%s.a", filepath.Join(dir, p)), true + } + pkg, ok := find_go_dag_package(p, dir) + if ok { + return pkg, true + } + return find_global_file(p, context) +} + +func path_and_alias(imp *ast.ImportSpec) (string, string) { + path := "" + if imp.Path != nil && len(imp.Path.Value) > 0 { + path = string(imp.Path.Value) + path = path[1 : len(path)-1] + } + alias := "" + if imp.Name != nil { + alias = imp.Name.Name + } + return path, alias +} + +func find_go_dag_package(imp, filedir string) (string, bool) { + // Support godag directory structure + dir, pkg := filepath.Split(imp) + godag_pkg := filepath.Join(filedir, "..", dir, "_obj", pkg+".a") + if file_exists(godag_pkg) { + return godag_pkg, true + } + return "", false +} + +// autobuild compares the mod time of the source files of the package, and if any of them is newer +// than the package object file, it will rebuild the package. +func autobuild(p *build.Package) error { + if p.Dir == "" { + return fmt.Errorf("no files to build") + } + ps, err := os.Stat(p.PkgObj) + if err != nil { + // Assume package file does not exist and build for the first time. + return build_package(p) + } + pt := ps.ModTime() + fs, err := readdir_lstat(p.Dir) + if err != nil { + return err + } + for _, f := range fs { + if f.IsDir() { + continue + } + if f.ModTime().After(pt) { + // Source file is newer than package file; rebuild. + return build_package(p) + } + } + return nil +} + +// build_package builds the package by calling `go install package/import`. If everything compiles +// correctly, the newly compiled package should then be in the usual place in the `$GOPATH/pkg` +// directory, and gocode will pick it up from there.
+func build_package(p *build.Package) error { + if *g_debug { + log.Printf("-------------------") + log.Printf("rebuilding package %s", p.Name) + log.Printf("package import: %s", p.ImportPath) + log.Printf("package object: %s", p.PkgObj) + log.Printf("package source dir: %s", p.Dir) + log.Printf("package source files: %v", p.GoFiles) + log.Printf("GOPATH: %v", g_daemon.context.GOPATH) + log.Printf("GOROOT: %v", g_daemon.context.GOROOT) + } + env := os.Environ() + for i, v := range env { + if strings.HasPrefix(v, "GOPATH=") { + env[i] = "GOPATH=" + g_daemon.context.GOPATH + } else if strings.HasPrefix(v, "GOROOT=") { + env[i] = "GOROOT=" + g_daemon.context.GOROOT + } + } + + cmd := exec.Command("go", "install", p.ImportPath) + cmd.Env = env + + // TODO: Should read STDERR rather than STDOUT. + out, err := cmd.CombinedOutput() + if err != nil { + return err + } + if *g_debug { + log.Printf("build out: %s\n", string(out)) + } + return nil +} + +// executes autobuild function if autobuild option is enabled, logs error and +// ignores it +func try_autobuild(p *build.Package) { + if g_config.Autobuild { + err := autobuild(p) + if err != nil && *g_debug { + log.Printf("Autobuild error: %s\n", err) + } + } +} + +func log_found_package_maybe(imp, pkgpath string) { + if *g_debug { + log.Printf("Found %q at %q\n", imp, pkgpath) + } +} + +func log_build_context(context *package_lookup_context) { + log.Printf(" GOROOT: %s\n", context.GOROOT) + log.Printf(" GOPATH: %s\n", context.GOPATH) + log.Printf(" GOOS: %s\n", context.GOOS) + log.Printf(" GOARCH: %s\n", context.GOARCH) + log.Printf(" BzlProjectRoot: %q\n", context.BzlProjectRoot) + log.Printf(" GBProjectRoot: %q\n", context.GBProjectRoot) + log.Printf(" lib-path: %q\n", g_config.LibPath) +} + +// find_global_file returns the file path of the compiled package corresponding to the specified +// import, and a boolean stating whether such path is valid. +// TODO: Return only one value, possibly empty string if not found. 
+func find_global_file(imp string, context *package_lookup_context) (string, bool) { + // gocode synthetically generates the builtin package + // "unsafe", since the "unsafe.a" package doesn't really exist. + // Thus, when the user request for the package "unsafe" we + // would return synthetic global file that would be used + // just as a key name to find this synthetic package + if imp == "unsafe" { + return "unsafe", true + } + + pkgfile := fmt.Sprintf("%s.a", imp) + + // if lib-path is defined, use it + if g_config.LibPath != "" { + for _, p := range filepath.SplitList(g_config.LibPath) { + pkg_path := filepath.Join(p, pkgfile) + if file_exists(pkg_path) { + log_found_package_maybe(imp, pkg_path) + return pkg_path, true + } + // Also check the relevant pkg/OS_ARCH dir for the libpath, if provided. + pkgdir := fmt.Sprintf("%s_%s", context.GOOS, context.GOARCH) + pkg_path = filepath.Join(p, "pkg", pkgdir, pkgfile) + if file_exists(pkg_path) { + log_found_package_maybe(imp, pkg_path) + return pkg_path, true + } + } + } + + // gb-specific lookup mode, only if the root dir was found + if g_config.PackageLookupMode == "gb" && context.GBProjectRoot != "" { + root := context.GBProjectRoot + pkgdir := filepath.Join(root, "pkg", context.GOOS+"-"+context.GOARCH) + if !is_dir(pkgdir) { + pkgdir = filepath.Join(root, "pkg", context.GOOS+"-"+context.GOARCH+"-race") + } + pkg_path := filepath.Join(pkgdir, pkgfile) + if file_exists(pkg_path) { + log_found_package_maybe(imp, pkg_path) + return pkg_path, true + } + } + + // bzl-specific lookup mode, only if the root dir was found + if g_config.PackageLookupMode == "bzl" && context.BzlProjectRoot != "" { + var root, impath string + if strings.HasPrefix(imp, g_config.CustomPkgPrefix+"/") { + root = filepath.Join(context.BzlProjectRoot, "bazel-bin") + impath = imp[len(g_config.CustomPkgPrefix)+1:] + } else if g_config.CustomVendorDir != "" { + // Try custom vendor dir. 
+ root = filepath.Join(context.BzlProjectRoot, "bazel-bin", g_config.CustomVendorDir)
+ impath = imp
+ }
+
+ if root != "" && impath != "" {
+ // There might be more than one ".a" file in the pkg path with bazel.
+ // But the best practice is to keep one go_library build target in each
+ // package directory so that it follows the standard Go package
+ // structure. Thus here we assume there is at most one ".a" file existing
+ // in the pkg path.
+ if d, err := os.Open(filepath.Join(root, impath)); err == nil {
+ defer d.Close()
+
+ if fis, err := d.Readdir(-1); err == nil {
+ for _, fi := range fis {
+ if !fi.IsDir() && filepath.Ext(fi.Name()) == ".a" {
+ pkg_path := filepath.Join(root, impath, fi.Name())
+ log_found_package_maybe(imp, pkg_path)
+ return pkg_path, true
+ }
+ }
+ }
+ }
+ }
+
+ if context.CurrentPackagePath != "" {
+ // Try vendor path first, see GO15VENDOREXPERIMENT.
+ // We don't check this environment variable however, seems like there is
+ // almost no harm in doing so (well.. if you experiment with vendoring,
+ // gocode will fail after enabling/disabling the flag, and you'll be
+ // forced to get rid of vendor binaries). But asking users to set this
+ // env var will bring more trouble. Because we also need to pass
+ // it from client to server, make sure their editors set it, etc.
+ // So, whatever, let's just pretend it's always on. 
+ package_path := context.CurrentPackagePath + for { + limp := filepath.Join(package_path, "vendor", imp) + if p, err := context.Import(limp, "", build.AllowBinary|build.FindOnly); err == nil { + try_autobuild(p) + if file_exists(p.PkgObj) { + log_found_package_maybe(imp, p.PkgObj) + return p.PkgObj, true + } + } + if package_path == "" { + break + } + next_path := filepath.Dir(package_path) + // let's protect ourselves from inf recursion here + if next_path == package_path { + break + } + package_path = next_path + } + } + + if p, err := context.Import(imp, "", build.AllowBinary|build.FindOnly); err == nil { + try_autobuild(p) + if file_exists(p.PkgObj) { + log_found_package_maybe(imp, p.PkgObj) + return p.PkgObj, true + } + } + + if *g_debug { + log.Printf("Import path %q was not resolved\n", imp) + log.Println("Gocode's build context is:") + log_build_context(context) + } + return "", false +} + +func package_name(file *ast.File) string { + if file.Name != nil { + return file.Name.Name + } + return "" +} + +//------------------------------------------------------------------------- +// decl_cache +// +// Thread-safe collection of DeclFileCache entities. +//------------------------------------------------------------------------- + +type package_lookup_context struct { + build.Context + BzlProjectRoot string + GBProjectRoot string + CurrentPackagePath string +} + +// gopath returns the list of Go path directories. +func (ctxt *package_lookup_context) gopath() []string { + var all []string + for _, p := range filepath.SplitList(ctxt.GOPATH) { + if p == "" || p == ctxt.GOROOT { + // Empty paths are uninteresting. + // If the path is the GOROOT, ignore it. + // People sometimes set GOPATH=$GOROOT. + // Do not get confused by this common mistake. + continue + } + if strings.HasPrefix(p, "~") { + // Path segments starting with ~ on Unix are almost always + // users who have incorrectly quoted ~ while setting GOPATH, + // preventing it from expanding to $HOME. 
+ // The situation is made more confusing by the fact that + // bash allows quoted ~ in $PATH (most shells do not). + // Do not get confused by this, and do not try to use the path. + // It does not exist, and printing errors about it confuses + // those users even more, because they think "sure ~ exists!". + // The go command diagnoses this situation and prints a + // useful error. + // On Windows, ~ is used in short names, such as c:\progra~1 + // for c:\program files. + continue + } + all = append(all, p) + } + return all +} + +func (ctxt *package_lookup_context) pkg_dirs() (string, []string) { + pkgdir := fmt.Sprintf("%s_%s", ctxt.GOOS, ctxt.GOARCH) + + var currentPackagePath string + var all []string + if ctxt.GOROOT != "" { + dir := filepath.Join(ctxt.GOROOT, "pkg", pkgdir) + if is_dir(dir) { + all = append(all, dir) + } + } + + switch g_config.PackageLookupMode { + case "go": + currentPackagePath = ctxt.CurrentPackagePath + for _, p := range ctxt.gopath() { + dir := filepath.Join(p, "pkg", pkgdir) + if is_dir(dir) { + all = append(all, dir) + } + dir = filepath.Join(dir, currentPackagePath, "vendor") + if is_dir(dir) { + all = append(all, dir) + } + } + case "gb": + if ctxt.GBProjectRoot != "" { + pkgdir := fmt.Sprintf("%s-%s", ctxt.GOOS, ctxt.GOARCH) + if !is_dir(pkgdir) { + pkgdir = fmt.Sprintf("%s-%s-race", ctxt.GOOS, ctxt.GOARCH) + } + dir := filepath.Join(ctxt.GBProjectRoot, "pkg", pkgdir) + if is_dir(dir) { + all = append(all, dir) + } + } + case "bzl": + // TODO: Support bazel mode + } + return currentPackagePath, all +} + +type decl_cache struct { + cache map[string]*decl_file_cache + context *package_lookup_context + sync.Mutex +} + +func new_decl_cache(context *package_lookup_context) *decl_cache { + return &decl_cache{ + cache: make(map[string]*decl_file_cache), + context: context, + } +} + +func (c *decl_cache) get(filename string) *decl_file_cache { + c.Lock() + defer c.Unlock() + + f, ok := c.cache[filename] + if !ok { + f = 
new_decl_file_cache(filename, c.context)
+ c.cache[filename] = f
+ }
+ return f
+}
+
+func (c *decl_cache) get_and_update(filename string) *decl_file_cache {
+ f := c.get(filename)
+ f.update()
+ return f
+}
diff --git a/langserver/internal/gocode/export.go b/langserver/internal/gocode/export.go
new file mode 100644
index 00000000..ac533fff
--- /dev/null
+++ b/langserver/internal/gocode/export.go
@@ -0,0 +1,38 @@
+package gocode
+
+import (
+ "go/build"
+)
+
+var bctx go_build_context
+
+func InitDaemon(bc *build.Context) {
+ bctx = pack_build_context(bc)
+ g_config.ProposeBuiltins = true
+ g_daemon = new(daemon)
+ g_daemon.drop_cache()
+}
+
+func SetBuildContext(bc *build.Context) {
+ bctx = pack_build_context(bc)
+}
+
+func AutoComplete(file []byte, filename string, offset int) ([]candidate, int) {
+ return server_auto_complete(file, filename, offset, bctx)
+}
+
+// dumb vars for unused parts of the package
+var (
+ g_sock *string
+ g_addr *string
+ fals = false
+ g_debug = &fals
+ get_socket_filename func() string
+ config_dir func() string
+ config_file func() string
+)
+
+// dumb types for unused parts of the package
+type (
+ RPC struct{}
+)
diff --git a/langserver/internal/gocode/package.go b/langserver/internal/gocode/package.go
new file mode 100644
index 00000000..59928df7
--- /dev/null
+++ b/langserver/internal/gocode/package.go
@@ -0,0 +1,254 @@
+package gocode
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "os"
+ "strings"
+)
+
+type package_parser interface {
+ parse_export(callback func(pkg string, decl ast.Decl))
+}
+
+//-------------------------------------------------------------------------
+// package_file_cache
+//
+// Structure that represents a cache for an imported package. In other words
+// these are the contents of an archive (*.a) file. 
+//------------------------------------------------------------------------- + +type package_file_cache struct { + name string // file name + import_name string + mtime int64 + defalias string + + scope *scope + main *decl // package declaration + others map[string]*decl +} + +func new_package_file_cache(absname, name string) *package_file_cache { + m := new(package_file_cache) + m.name = absname + m.import_name = name + m.mtime = 0 + m.defalias = "" + return m +} + +// Creates a cache that stays in cache forever. Useful for built-in packages. +func new_package_file_cache_forever(name, defalias string) *package_file_cache { + m := new(package_file_cache) + m.name = name + m.mtime = -1 + m.defalias = defalias + return m +} + +func (m *package_file_cache) find_file() string { + if file_exists(m.name) { + return m.name + } + + n := len(m.name) + filename := m.name[:n-1] + "6" + if file_exists(filename) { + return filename + } + + filename = m.name[:n-1] + "8" + if file_exists(filename) { + return filename + } + + filename = m.name[:n-1] + "5" + if file_exists(filename) { + return filename + } + return m.name +} + +func (m *package_file_cache) update_cache() { + if m.mtime == -1 { + return + } + fname := m.find_file() + stat, err := os.Stat(fname) + if err != nil { + return + } + + statmtime := stat.ModTime().UnixNano() + if m.mtime != statmtime { + m.mtime = statmtime + + data, err := file_reader.read_file(fname) + if err != nil { + return + } + m.process_package_data(data) + } +} + +func (m *package_file_cache) process_package_data(data []byte) { + m.scope = new_named_scope(g_universe_scope, m.name) + + // find import section + i := bytes.Index(data, []byte{'\n', '$', '$'}) + if i == -1 { + panic(fmt.Sprintf("Can't find the import section in the package file %s", m.name)) + } + data = data[i+len("\n$$"):] + + // main package + m.main = new_decl(m.name, decl_package, nil) + // create map for other packages + m.others = make(map[string]*decl) + + var pp package_parser 
+ if data[0] == 'B' { + // binary format, skip 'B\n' + data = data[2:] + var p gc_bin_parser + p.init(data, m) + pp = &p + } else { + // textual format, find the beginning of the package clause + i = bytes.Index(data, []byte{'p', 'a', 'c', 'k', 'a', 'g', 'e'}) + if i == -1 { + panic("Can't find the package clause") + } + data = data[i:] + + var p gc_parser + p.init(data, m) + pp = &p + } + + prefix := "!" + m.name + "!" + pp.parse_export(func(pkg string, decl ast.Decl) { + anonymify_ast(decl, decl_foreign, m.scope) + if pkg == "" || strings.HasPrefix(pkg, prefix) { + // main package + add_ast_decl_to_package(m.main, decl, m.scope) + } else { + // others + if _, ok := m.others[pkg]; !ok { + m.others[pkg] = new_decl(pkg, decl_package, nil) + } + add_ast_decl_to_package(m.others[pkg], decl, m.scope) + } + }) + + // hack, add ourselves to the package scope + mainName := "!" + m.name + "!" + m.defalias + m.add_package_to_scope(mainName, m.name) + + // replace dummy package decls in package scope to actual packages + for key := range m.scope.entities { + if !strings.HasPrefix(key, "!") { + continue + } + pkg, ok := m.others[key] + if !ok && key == mainName { + pkg = m.main + } + m.scope.replace_decl(key, pkg) + } +} + +func (m *package_file_cache) add_package_to_scope(alias, realname string) { + d := new_decl(realname, decl_package, nil) + m.scope.add_decl(alias, d) +} + +func add_ast_decl_to_package(pkg *decl, decl ast.Decl, scope *scope) { + foreach_decl(decl, func(data *foreach_decl_struct) { + class := ast_decl_class(data.decl) + for i, name := range data.names { + typ, v, vi := data.type_value_index(i) + + d := new_decl_full(name.Name, class, decl_foreign|ast_decl_flags(data.decl), typ, v, vi, scope) + if d == nil { + return + } + + if !name.IsExported() && d.class != decl_type { + return + } + + methodof := method_of(data.decl) + if methodof != "" { + decl := pkg.find_child(methodof) + if decl != nil { + decl.add_child(d) + } else { + decl = new_decl(methodof, 
decl_methods_stub, scope) + decl.add_child(d) + pkg.add_child(decl) + } + } else { + decl := pkg.find_child(d.name) + if decl != nil { + decl.expand_or_replace(d) + } else { + pkg.add_child(d) + } + } + } + }) +} + +//------------------------------------------------------------------------- +// package_cache +//------------------------------------------------------------------------- + +type package_cache map[string]*package_file_cache + +func new_package_cache() package_cache { + m := make(package_cache) + + // add built-in "unsafe" package + m.add_builtin_unsafe_package() + + return m +} + +// Function fills 'ps' set with packages from 'packages' import information. +// In case if package is not in the cache, it creates one and adds one to the cache. +func (c package_cache) append_packages(ps map[string]*package_file_cache, pkgs []package_import) { + for _, m := range pkgs { + if _, ok := ps[m.abspath]; ok { + continue + } + + if mod, ok := c[m.abspath]; ok { + ps[m.abspath] = mod + } else { + mod = new_package_file_cache(m.abspath, m.path) + ps[m.abspath] = mod + c[m.abspath] = mod + } + } +} + +var g_builtin_unsafe_package = []byte(` +import +$$ +package unsafe + type @"".Pointer uintptr + func @"".Offsetof (? any) uintptr + func @"".Sizeof (? any) uintptr + func @"".Alignof (? 
any) uintptr + +$$ +`) + +func (c package_cache) add_builtin_unsafe_package() { + pkg := new_package_file_cache_forever("unsafe", "unsafe") + pkg.process_package_data(g_builtin_unsafe_package) + c["unsafe"] = pkg +} diff --git a/langserver/internal/gocode/package_bin.go b/langserver/internal/gocode/package_bin.go new file mode 100644 index 00000000..576f1ba0 --- /dev/null +++ b/langserver/internal/gocode/package_bin.go @@ -0,0 +1,829 @@ +package gocode + +import ( + "encoding/binary" + "fmt" + "go/ast" + "go/token" + "strconv" + "strings" + "unicode" + "unicode/utf8" +) + +//------------------------------------------------------------------------- +// gc_bin_parser +// +// The following part of the code may contain portions of the code from the Go +// standard library, which tells me to retain their copyright notice: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +//------------------------------------------------------------------------- + +type gc_bin_parser struct { + data []byte + buf []byte // for reading strings + version int // export format version + + // object lists + strList []string // in order of appearance + pathList []string // in order of appearance + pkgList []string // in order of appearance + typList []ast.Expr // in order of appearance + callback func(pkg string, decl ast.Decl) + pfc *package_file_cache + trackAllTypes bool + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + + // debugging support + debugFormat bool + read int // bytes read + +} + +func (p *gc_bin_parser) init(data []byte, pfc *package_file_cache) { + p.data = data + p.version = -1 // unknown version + p.strList = []string{""} // empty string is mapped to 0 + p.pathList = []string{""} // empty string is mapped to 0 + p.pfc = pfc +} + +func (p *gc_bin_parser) parse_export(callback func(string, ast.Decl)) { + p.callback = callback + + // read version info + var versionstr string + if b := p.rawByte(); b == 'c' || b == 'd' { + // Go1.7 encoding; first byte encodes low-level + // encoding format (compact vs debug). + // For backward-compatibility only (avoid problems with + // old installed packages). Newly compiled packages use + // the extensible format string. + // TODO(gri) Remove this support eventually; after Go1.8. 
+ if b == 'd' { + p.debugFormat = true + } + p.trackAllTypes = p.rawByte() == 'a' + p.posInfoFormat = p.int() != 0 + versionstr = p.string() + if versionstr == "v1" { + p.version = 0 + } + } else { + // Go1.8 extensible encoding + // read version string and extract version number (ignore anything after the version number) + versionstr = p.rawStringln(b) + if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { + if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { + p.version = v + } + } + } + + // read version specific flags - extend as necessary + switch p.version { + // case 6: + // ... + // fallthrough + case 5, 4, 3, 2, 1: + p.debugFormat = p.rawStringln(p.rawByte()) == "debug" + p.trackAllTypes = p.int() != 0 + p.posInfoFormat = p.int() != 0 + case 0: + // Go1.7 encoding format - nothing to do here + default: + panic(fmt.Errorf("unknown export format version %d (%q)", p.version, versionstr)) + } + + // --- generic export data --- + + // populate typList with predeclared "known" types + p.typList = append(p.typList, predeclared...) 
+ + // read package data + pkgName := p.pkg() + p.pfc.defalias = pkgName[strings.LastIndex(pkgName, "!")+1:] + + // read objects of phase 1 only (see cmd/compiler/internal/gc/bexport.go) + objcount := 0 + for { + tag := p.tagOrIndex() + if tag == endTag { + break + } + p.obj(tag) + objcount++ + } + + // self-verification + if count := p.int(); count != objcount { + panic(fmt.Sprintf("got %d objects; want %d", objcount, count)) + } +} + +func (p *gc_bin_parser) pkg() string { + // if the package was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.pkgList[i] + } + + // otherwise, i is the package tag (< 0) + if i != packageTag { + panic(fmt.Sprintf("unexpected package tag %d version %d", i, p.version)) + } + + // read package data + name := p.string() + var path string + if p.version >= 5 { + path = p.path() + } else { + path = p.string() + } + + // we should never see an empty package name + if name == "" { + panic("empty package name in import") + } + + // an empty path denotes the package we are currently importing; + // it must be the first package we see + if (path == "") != (len(p.pkgList) == 0) { + panic(fmt.Sprintf("package path %q for pkg index %d", path, len(p.pkgList))) + } + + var fullName string + if path != "" { + fullName = "!" + path + "!" + name + p.pfc.add_package_to_scope(fullName, path) + } else { + fullName = "!" + p.pfc.name + "!" 
+ name + } + + // if the package was imported before, use that one; otherwise create a new one + p.pkgList = append(p.pkgList, fullName) + return p.pkgList[len(p.pkgList)-1] +} + +func (p *gc_bin_parser) obj(tag int) { + switch tag { + case constTag: + p.pos() + pkg, name := p.qualifiedName() + typ := p.typ("") + p.skipValue() // ignore const value, gocode's not interested + p.callback(pkg, &ast.GenDecl{ + Tok: token.CONST, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: typ, + Values: []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}, + }, + }, + }) + + case aliasTag: + // TODO(gri) verify type alias hookup is correct + p.pos() + pkg, name := p.qualifiedName() + typ := p.typ("") + p.callback(pkg, &ast.GenDecl{ + Tok: token.TYPE, + Specs: []ast.Spec{typeAliasSpec(name, typ)}, + }) + + case typeTag: + _ = p.typ("") + + case varTag: + p.pos() + pkg, name := p.qualifiedName() + typ := p.typ("") + p.callback(pkg, &ast.GenDecl{ + Tok: token.VAR, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: typ, + }, + }, + }) + + case funcTag: + p.pos() + pkg, name := p.qualifiedName() + params := p.paramList() + results := p.paramList() + p.callback(pkg, &ast.FuncDecl{ + Name: ast.NewIdent(name), + Type: &ast.FuncType{Params: params, Results: results}, + }) + + default: + panic(fmt.Sprintf("unexpected object tag %d", tag)) + } +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +func (p *gc_bin_parser) pos() { + if !p.posInfoFormat { + return + } + + file := p.prevFile + line := p.prevLine + delta := p.int() + line += delta + if p.version >= 5 { + if delta == deltaNewFile { + if n := p.int(); n >= 0 { + // file changed + file = p.path() + line = n + } + } + } else { + if delta == 0 { + if n := p.int(); n >= 0 { + // file changed + file = p.prevFile[:n] + p.string() + line = p.int() + } + } + } + p.prevFile = file + p.prevLine = line + + // TODO(gri) register new position 
+} + +func (p *gc_bin_parser) qualifiedName() (pkg string, name string) { + name = p.string() + pkg = p.pkg() + return pkg, name +} + +func (p *gc_bin_parser) reserveMaybe() int { + if p.trackAllTypes { + p.typList = append(p.typList, nil) + return len(p.typList) - 1 + } else { + return -1 + } +} + +func (p *gc_bin_parser) recordMaybe(idx int, t ast.Expr) ast.Expr { + if idx == -1 { + return t + } + p.typList[idx] = t + return t +} + +func (p *gc_bin_parser) record(t ast.Expr) { + p.typList = append(p.typList, t) +} + +// parent is the package which declared the type; parent == nil means +// the package currently imported. The parent package is needed for +// exported struct fields and interface methods which don't contain +// explicit package information in the export data. +func (p *gc_bin_parser) typ(parent string) ast.Expr { + // if the type was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.typList[i] + } + + // otherwise, i is the type tag (< 0) + switch i { + case namedTag: + // read type object + p.pos() + parent, name := p.qualifiedName() + tdecl := &ast.GenDecl{ + Tok: token.TYPE, + Specs: []ast.Spec{ + &ast.TypeSpec{ + Name: ast.NewIdent(name), + }, + }, + } + + // record it right away (underlying type can contain refs to t) + t := &ast.SelectorExpr{X: ast.NewIdent(parent), Sel: ast.NewIdent(name)} + p.record(t) + + // parse underlying type + t0 := p.typ(parent) + tdecl.Specs[0].(*ast.TypeSpec).Type = t0 + + p.callback(parent, tdecl) + + // interfaces have no methods + if _, ok := t0.(*ast.InterfaceType); ok { + return t + } + + // read associated methods + for i := p.int(); i > 0; i-- { + // TODO(gri) replace this with something closer to fieldName + p.pos() + name := p.string() + if !exported(name) { + p.pkg() + } + + recv := p.paramList() + params := p.paramList() + results := p.paramList() + p.int() // go:nointerface pragma - discarded + + strip_method_receiver(recv) + p.callback(parent, &ast.FuncDecl{ + Recv: recv, 
+ Name: ast.NewIdent(name), + Type: &ast.FuncType{Params: params, Results: results}, + }) + } + return t + case arrayTag: + i := p.reserveMaybe() + n := p.int64() + elt := p.typ(parent) + return p.recordMaybe(i, &ast.ArrayType{ + Len: &ast.BasicLit{Kind: token.INT, Value: fmt.Sprint(n)}, + Elt: elt, + }) + + case sliceTag: + i := p.reserveMaybe() + elt := p.typ(parent) + return p.recordMaybe(i, &ast.ArrayType{Len: nil, Elt: elt}) + + case dddTag: + i := p.reserveMaybe() + elt := p.typ(parent) + return p.recordMaybe(i, &ast.Ellipsis{Elt: elt}) + + case structTag: + i := p.reserveMaybe() + return p.recordMaybe(i, p.structType(parent)) + + case pointerTag: + i := p.reserveMaybe() + elt := p.typ(parent) + return p.recordMaybe(i, &ast.StarExpr{X: elt}) + + case signatureTag: + i := p.reserveMaybe() + params := p.paramList() + results := p.paramList() + return p.recordMaybe(i, &ast.FuncType{Params: params, Results: results}) + + case interfaceTag: + i := p.reserveMaybe() + var embeddeds []*ast.SelectorExpr + for n := p.int(); n > 0; n-- { + p.pos() + if named, ok := p.typ(parent).(*ast.SelectorExpr); ok { + embeddeds = append(embeddeds, named) + } + } + methods := p.methodList(parent) + for _, field := range embeddeds { + methods = append(methods, &ast.Field{Type: field}) + } + return p.recordMaybe(i, &ast.InterfaceType{Methods: &ast.FieldList{List: methods}}) + + case mapTag: + i := p.reserveMaybe() + key := p.typ(parent) + val := p.typ(parent) + return p.recordMaybe(i, &ast.MapType{Key: key, Value: val}) + + case chanTag: + i := p.reserveMaybe() + dir := ast.SEND | ast.RECV + switch d := p.int(); d { + case 1: + dir = ast.RECV + case 2: + dir = ast.SEND + case 3: + // already set + default: + panic(fmt.Sprintf("unexpected channel dir %d", d)) + } + elt := p.typ(parent) + return p.recordMaybe(i, &ast.ChanType{Dir: dir, Value: elt}) + + default: + panic(fmt.Sprintf("unexpected type tag %d", i)) + } +} + +func (p *gc_bin_parser) structType(parent string) *ast.StructType { 
+ var fields []*ast.Field + if n := p.int(); n > 0 { + fields = make([]*ast.Field, n) + for i := range fields { + fields[i], _ = p.field(parent) // (*ast.Field, tag), not interested in tags + } + } + return &ast.StructType{Fields: &ast.FieldList{List: fields}} +} + +func (p *gc_bin_parser) field(parent string) (*ast.Field, string) { + p.pos() + _, name, _ := p.fieldName(parent) + typ := p.typ(parent) + tag := p.string() + + var names []*ast.Ident + if name != "" { + names = []*ast.Ident{ast.NewIdent(name)} + } + return &ast.Field{ + Names: names, + Type: typ, + }, tag +} + +func (p *gc_bin_parser) methodList(parent string) (methods []*ast.Field) { + if n := p.int(); n > 0 { + methods = make([]*ast.Field, n) + for i := range methods { + methods[i] = p.method(parent) + } + } + return +} + +func (p *gc_bin_parser) method(parent string) *ast.Field { + p.pos() + _, name, _ := p.fieldName(parent) + params := p.paramList() + results := p.paramList() + return &ast.Field{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: &ast.FuncType{Params: params, Results: results}, + } +} + +func (p *gc_bin_parser) fieldName(parent string) (string, string, bool) { + name := p.string() + pkg := parent + if p.version == 0 && name == "_" { + // version 0 didn't export a package for _ fields + return pkg, name, false + } + var alias bool + switch name { + case "": + // 1) field name matches base type name and is exported: nothing to do + case "?": + // 2) field name matches base type name and is not exported: need package + name = "" + pkg = p.pkg() + case "@": + // 3) field name doesn't match type name (alias) + name = p.string() + alias = true + fallthrough + default: + if !exported(name) { + pkg = p.pkg() + } + } + return pkg, name, alias +} + +func (p *gc_bin_parser) paramList() *ast.FieldList { + n := p.int() + if n == 0 { + return nil + } + // negative length indicates unnamed parameters + named := true + if n < 0 { + n = -n + named = false + } + // n > 0 + flds := make([]*ast.Field, 
n) + for i := range flds { + flds[i] = p.param(named) + } + return &ast.FieldList{List: flds} +} + +func (p *gc_bin_parser) param(named bool) *ast.Field { + t := p.typ("") + + name := "?" + if named { + name = p.string() + if name == "" { + panic("expected named parameter") + } + if name != "_" { + p.pkg() + } + if i := strings.Index(name, "·"); i > 0 { + name = name[:i] // cut off gc-specific parameter numbering + } + } + + // read and discard compiler-specific info + p.string() + + return &ast.Field{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: t, + } +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +func (p *gc_bin_parser) skipValue() { + switch tag := p.tagOrIndex(); tag { + case falseTag, trueTag: + case int64Tag: + p.int64() + case floatTag: + p.float() + case complexTag: + p.float() + p.float() + case stringTag: + p.string() + default: + panic(fmt.Sprintf("unexpected value tag %d", tag)) + } +} + +func (p *gc_bin_parser) float() { + sign := p.int() + if sign == 0 { + return + } + + p.int() // exp + p.string() // mant +} + +// ---------------------------------------------------------------------------- +// Low-level decoders + +func (p *gc_bin_parser) tagOrIndex() int { + if p.debugFormat { + p.marker('t') + } + + return int(p.rawInt64()) +} + +func (p *gc_bin_parser) int() int { + x := p.int64() + if int64(int(x)) != x { + panic("exported integer too large") + } + return int(x) +} + +func (p *gc_bin_parser) int64() int64 { + if p.debugFormat { + p.marker('i') + } + + return p.rawInt64() +} + +func (p *gc_bin_parser) path() string { + if p.debugFormat { + p.marker('p') + } + // if the path was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.pathList[i] + } + // otherwise, i is the negative path length (< 0) + a := make([]string, -i) + for n := range a { + a[n] = p.string() + } + s := strings.Join(a, "/") + p.pathList = 
append(p.pathList, s) + return s +} + +func (p *gc_bin_parser) string() string { + if p.debugFormat { + p.marker('s') + } + // if the string was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.strList[i] + } + // otherwise, i is the negative string length (< 0) + if n := int(-i); n <= cap(p.buf) { + p.buf = p.buf[:n] + } else { + p.buf = make([]byte, n) + } + for i := range p.buf { + p.buf[i] = p.rawByte() + } + s := string(p.buf) + p.strList = append(p.strList, s) + return s +} + +func (p *gc_bin_parser) marker(want byte) { + if got := p.rawByte(); got != want { + panic(fmt.Sprintf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)) + } + + pos := p.read + if n := int(p.rawInt64()); n != pos { + panic(fmt.Sprintf("incorrect position: got %d; want %d", n, pos)) + } +} + +// rawInt64 should only be used by low-level decoders. +func (p *gc_bin_parser) rawInt64() int64 { + i, err := binary.ReadVarint(p) + if err != nil { + panic(fmt.Sprintf("read error: %v", err)) + } + return i +} + +// rawStringln should only be used to read the initial version string. +func (p *gc_bin_parser) rawStringln(b byte) string { + p.buf = p.buf[:0] + for b != '\n' { + p.buf = append(p.buf, b) + b = p.rawByte() + } + return string(p.buf) +} + +// needed for binary.ReadVarint in rawInt64 +func (p *gc_bin_parser) ReadByte() (byte, error) { + return p.rawByte(), nil +} + +// byte is the bottleneck interface for reading p.data. +// It unescapes '|' 'S' to '$' and '|' '|' to '|'. +// rawByte should only be used by low-level decoders. 
+func (p *gc_bin_parser) rawByte() byte { + b := p.data[0] + r := 1 + if b == '|' { + b = p.data[1] + r = 2 + switch b { + case 'S': + b = '$' + case '|': + // nothing to do + default: + panic("unexpected escape sequence in export data") + } + } + p.data = p.data[r:] + p.read += r + return b + +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. +const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag +) + +var predeclared = []ast.Expr{ + // basic types + ast.NewIdent("bool"), + ast.NewIdent("int"), + ast.NewIdent("int8"), + ast.NewIdent("int16"), + ast.NewIdent("int32"), + ast.NewIdent("int64"), + ast.NewIdent("uint"), + ast.NewIdent("uint8"), + ast.NewIdent("uint16"), + ast.NewIdent("uint32"), + ast.NewIdent("uint64"), + ast.NewIdent("uintptr"), + ast.NewIdent("float32"), + ast.NewIdent("float64"), + ast.NewIdent("complex64"), + ast.NewIdent("complex128"), + ast.NewIdent("string"), + + // basic type aliases + ast.NewIdent("byte"), + ast.NewIdent("rune"), + + // error + ast.NewIdent("error"), + + // TODO(nsf): don't think those are used in just package type info, + // maybe for consts, but we are not interested in that + // untyped types + ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedBool], + ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedInt], + ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedRune], + ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedFloat], + ast.NewIdent(">_<"), // TODO: 
types.Typ[types.UntypedComplex], + ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedString], + ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedNil], + + // package unsafe + &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")}, + + // invalid type + ast.NewIdent(">_<"), // TODO: types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + ast.NewIdent("any"), +} diff --git a/langserver/internal/gocode/package_text.go b/langserver/internal/gocode/package_text.go new file mode 100644 index 00000000..9d4b5629 --- /dev/null +++ b/langserver/internal/gocode/package_text.go @@ -0,0 +1,678 @@ +package gocode + +import ( + "bytes" + "errors" + "fmt" + "go/ast" + "go/token" + "strconv" + "text/scanner" +) + +//------------------------------------------------------------------------- +// gc_parser +// +// The following part of the code may contain portions of the code from the Go +// standard library, which tells me to retain their copyright notice: +// +// Copyright (c) 2009 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +//------------------------------------------------------------------------- + +type gc_parser struct { + scanner scanner.Scanner + tok rune + lit string + path_to_name map[string]string + beautify bool + pfc *package_file_cache +} + +func (p *gc_parser) init(data []byte, pfc *package_file_cache) { + p.scanner.Init(bytes.NewReader(data)) + p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } + p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanStrings | + scanner.ScanComments | scanner.ScanChars | scanner.SkipComments + p.scanner.Whitespace = 1<<'\t' | 1<<' ' | 1<<'\r' | 1<<'\v' | 1<<'\f' + p.scanner.Filename = "package.go" + p.next() + // and the built-in "unsafe" package to the path_to_name map + p.path_to_name = map[string]string{"unsafe": "unsafe"} + p.pfc = pfc +} + +func (p *gc_parser) next() { + p.tok = p.scanner.Scan() + switch p.tok { + case scanner.Ident, scanner.Int, scanner.String: + p.lit = p.scanner.TokenText() + default: + p.lit = "" + } +} + +func (p *gc_parser) error(msg string) { + panic(errors.New(msg)) +} + +func (p *gc_parser) errorf(format string, args ...interface{}) { + p.error(fmt.Sprintf(format, args...)) +} + +func 
(p *gc_parser) expect(tok rune) string { + lit := p.lit + if p.tok != tok { + p.errorf("expected %s, got %s (%q)", scanner.TokenString(tok), + scanner.TokenString(p.tok), lit) + } + p.next() + return lit +} + +func (p *gc_parser) expect_keyword(keyword string) { + lit := p.expect(scanner.Ident) + if lit != keyword { + p.errorf("expected keyword: %s, got: %q", keyword, lit) + } +} + +func (p *gc_parser) expect_special(what string) { + i := 0 + for i < len(what) { + if p.tok != rune(what[i]) { + break + } + + nc := p.scanner.Peek() + if i != len(what)-1 && nc <= ' ' { + break + } + + p.next() + i++ + } + + if i < len(what) { + p.errorf("expected: %q, got: %q", what, what[0:i]) + } +} + +// dotIdentifier = "?" | ( ident | '·' ) { ident | int | '·' } . +// we're doing lexer job here, kind of +func (p *gc_parser) parse_dot_ident() string { + if p.tok == '?' { + p.next() + return "?" + } + + ident := "" + sep := 'x' + i, j := 0, -1 + for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { + ident += p.lit + if p.tok == '·' { + ident += "·" + j = i + i++ + } + i += len(p.lit) + sep = p.scanner.Peek() + p.next() + } + // middot = \xc2\xb7 + if j != -1 && i > j+1 { + c := ident[j+2] + if c >= '0' && c <= '9' { + ident = ident[0:j] + } + } + return ident +} + +// ImportPath = string_lit . +// quoted name of the path, but we return it as an identifier, taking an alias +// from 'pathToAlias' map, it is filled by import statements +func (p *gc_parser) parse_package() *ast.Ident { + path, err := strconv.Unquote(p.expect(scanner.String)) + if err != nil { + panic(err) + } + + return ast.NewIdent(path) +} + +// ExportedName = "@" ImportPath "." dotIdentifier . +func (p *gc_parser) parse_exported_name() *ast.SelectorExpr { + p.expect('@') + pkg := p.parse_package() + if pkg.Name == "" { + pkg.Name = "!" + p.pfc.name + "!" 
+ p.pfc.defalias + } else { + pkg.Name = p.path_to_name[pkg.Name] + } + p.expect('.') + name := ast.NewIdent(p.parse_dot_ident()) + return &ast.SelectorExpr{X: pkg, Sel: name} +} + +// Name = identifier | "?" | ExportedName . +func (p *gc_parser) parse_name() (string, ast.Expr) { + switch p.tok { + case scanner.Ident: + name := p.lit + p.next() + return name, ast.NewIdent(name) + case '?': + p.next() + return "?", ast.NewIdent("?") + case '@': + en := p.parse_exported_name() + return en.Sel.Name, en + } + p.error("name expected") + return "", nil +} + +// Field = Name Type [ string_lit ] . +func (p *gc_parser) parse_field() *ast.Field { + var tag string + name, _ := p.parse_name() + typ := p.parse_type() + if p.tok == scanner.String { + tag = p.expect(scanner.String) + } + + var names []*ast.Ident + if name != "?" { + names = []*ast.Ident{ast.NewIdent(name)} + } + + return &ast.Field{ + Names: names, + Type: typ, + Tag: &ast.BasicLit{Kind: token.STRING, Value: tag}, + } +} + +// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . +func (p *gc_parser) parse_parameter() *ast.Field { + // name + name, _ := p.parse_name() + + // type + var typ ast.Expr + if p.tok == '.' { + p.expect_special("...") + typ = &ast.Ellipsis{Elt: p.parse_type()} + } else { + typ = p.parse_type() + } + + var tag string + if p.tok == scanner.String { + tag = p.expect(scanner.String) + } + + return &ast.Field{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: typ, + Tag: &ast.BasicLit{Kind: token.STRING, Value: tag}, + } +} + +// Parameters = "(" [ ParameterList ] ")" . +// ParameterList = { Parameter "," } Parameter . 
+func (p *gc_parser) parse_parameters() *ast.FieldList { + flds := []*ast.Field{} + parse_parameter := func() { + par := p.parse_parameter() + flds = append(flds, par) + } + + p.expect('(') + if p.tok != ')' { + parse_parameter() + for p.tok == ',' { + p.next() + parse_parameter() + } + } + p.expect(')') + return &ast.FieldList{List: flds} +} + +// Signature = Parameters [ Result ] . +// Result = Type | Parameters . +func (p *gc_parser) parse_signature() *ast.FuncType { + var params *ast.FieldList + var results *ast.FieldList + + params = p.parse_parameters() + switch p.tok { + case scanner.Ident, '[', '*', '<', '@': + fld := &ast.Field{Type: p.parse_type()} + results = &ast.FieldList{List: []*ast.Field{fld}} + case '(': + results = p.parse_parameters() + } + return &ast.FuncType{Params: params, Results: results} +} + +// MethodOrEmbedSpec = Name [ Signature ] . +func (p *gc_parser) parse_method_or_embed_spec() *ast.Field { + name, nameexpr := p.parse_name() + if p.tok == '(' { + typ := p.parse_signature() + return &ast.Field{ + Names: []*ast.Ident{ast.NewIdent(name)}, + Type: typ, + } + } + + return &ast.Field{ + Type: nameexpr, + } +} + +// int_lit = [ "-" | "+" ] { "0" ... "9" } . +func (p *gc_parser) parse_int() { + switch p.tok { + case '-', '+': + p.next() + } + p.expect(scanner.Int) +} + +// number = int_lit [ "p" int_lit ] . +func (p *gc_parser) parse_number() { + p.parse_int() + if p.lit == "p" { + p.next() + p.parse_int() + } +} + +//------------------------------------------------------------------------------- +// gc_parser.types +//------------------------------------------------------------------------------- + +// InterfaceType = "interface" "{" [ MethodOrEmbedList ] "}" . +// MethodOrEmbedList = MethodOrEmbedSpec { ";" MethodOrEmbedSpec } . 
+func (p *gc_parser) parse_interface_type() ast.Expr { + var methods []*ast.Field + parse_method := func() { + meth := p.parse_method_or_embed_spec() + methods = append(methods, meth) + } + + p.expect_keyword("interface") + p.expect('{') + if p.tok != '}' { + parse_method() + for p.tok == ';' { + p.next() + parse_method() + } + } + p.expect('}') + return &ast.InterfaceType{Methods: &ast.FieldList{List: methods}} +} + +// StructType = "struct" "{" [ FieldList ] "}" . +// FieldList = Field { ";" Field } . +func (p *gc_parser) parse_struct_type() ast.Expr { + var fields []*ast.Field + parse_field := func() { + fld := p.parse_field() + fields = append(fields, fld) + } + + p.expect_keyword("struct") + p.expect('{') + if p.tok != '}' { + parse_field() + for p.tok == ';' { + p.next() + parse_field() + } + } + p.expect('}') + return &ast.StructType{Fields: &ast.FieldList{List: fields}} +} + +// MapType = "map" "[" Type "]" Type . +func (p *gc_parser) parse_map_type() ast.Expr { + p.expect_keyword("map") + p.expect('[') + key := p.parse_type() + p.expect(']') + elt := p.parse_type() + return &ast.MapType{Key: key, Value: elt} +} + +// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . +func (p *gc_parser) parse_chan_type() ast.Expr { + dir := ast.SEND | ast.RECV + if p.tok == scanner.Ident { + p.expect_keyword("chan") + if p.tok == '<' { + p.expect_special("<-") + dir = ast.SEND + } + } else { + p.expect_special("<-") + p.expect_keyword("chan") + dir = ast.RECV + } + + elt := p.parse_type() + return &ast.ChanType{Dir: dir, Value: elt} +} + +// ArrayOrSliceType = ArrayType | SliceType . +// ArrayType = "[" int_lit "]" Type . +// SliceType = "[" "]" Type . 
+func (p *gc_parser) parse_array_or_slice_type() ast.Expr { + p.expect('[') + if p.tok == ']' { + // SliceType + p.next() // skip ']' + return &ast.ArrayType{Len: nil, Elt: p.parse_type()} + } + + // ArrayType + lit := p.expect(scanner.Int) + p.expect(']') + return &ast.ArrayType{ + Len: &ast.BasicLit{Kind: token.INT, Value: lit}, + Elt: p.parse_type(), + } +} + +// Type = +// BasicType | TypeName | ArrayType | SliceType | StructType | +// PointerType | FuncType | InterfaceType | MapType | ChanType | +// "(" Type ")" . +// BasicType = ident . +// TypeName = ExportedName . +// SliceType = "[" "]" Type . +// PointerType = "*" Type . +// FuncType = "func" Signature . +func (p *gc_parser) parse_type() ast.Expr { + switch p.tok { + case scanner.Ident: + switch p.lit { + case "struct": + return p.parse_struct_type() + case "func": + p.next() + return p.parse_signature() + case "interface": + return p.parse_interface_type() + case "map": + return p.parse_map_type() + case "chan": + return p.parse_chan_type() + default: + lit := p.lit + p.next() + return ast.NewIdent(lit) + } + case '@': + return p.parse_exported_name() + case '[': + return p.parse_array_or_slice_type() + case '*': + p.next() + return &ast.StarExpr{X: p.parse_type()} + case '<': + return p.parse_chan_type() + case '(': + p.next() + typ := p.parse_type() + p.expect(')') + return typ + } + p.errorf("unexpected token: %s", scanner.TokenString(p.tok)) + return nil +} + +//------------------------------------------------------------------------------- +// gc_parser.declarations +//------------------------------------------------------------------------------- + +// ImportDecl = "import" identifier string_lit . +func (p *gc_parser) parse_import_decl() { + p.expect_keyword("import") + alias := p.expect(scanner.Ident) + path := p.parse_package() + fullName := "!" + path.Name + "!" 
+ alias + p.path_to_name[path.Name] = fullName + p.pfc.add_package_to_scope(fullName, path.Name) +} + +// ConstDecl = "const" ExportedName [ Type ] "=" Literal . +// Literal = bool_lit | int_lit | float_lit | complex_lit | string_lit . +// bool_lit = "true" | "false" . +// complex_lit = "(" float_lit "+" float_lit ")" . +// rune_lit = "(" int_lit "+" int_lit ")" . +// string_lit = `"` { unicode_char } `"` . +func (p *gc_parser) parse_const_decl() (string, *ast.GenDecl) { + // TODO: do we really need actual const value? gocode doesn't use this + p.expect_keyword("const") + name := p.parse_exported_name() + + var typ ast.Expr + if p.tok != '=' { + typ = p.parse_type() + } + + p.expect('=') + + // skip the value + switch p.tok { + case scanner.Ident: + // must be bool, true or false + p.next() + case '-', '+', scanner.Int: + // number + p.parse_number() + case '(': + // complex_lit or rune_lit + p.next() // skip '(' + if p.tok == scanner.Char { + p.next() + } else { + p.parse_number() + } + p.expect('+') + p.parse_number() + p.expect(')') + case scanner.Char: + p.next() + case scanner.String: + p.next() + default: + p.error("expected literal") + } + + return name.X.(*ast.Ident).Name, &ast.GenDecl{ + Tok: token.CONST, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{name.Sel}, + Type: typ, + Values: []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}, + }, + }, + } +} + +// TypeDecl = "type" ExportedName Type . +func (p *gc_parser) parse_type_decl() (string, *ast.GenDecl) { + p.expect_keyword("type") + name := p.parse_exported_name() + typ := p.parse_type() + return name.X.(*ast.Ident).Name, &ast.GenDecl{ + Tok: token.TYPE, + Specs: []ast.Spec{ + &ast.TypeSpec{ + Name: name.Sel, + Type: typ, + }, + }, + } +} + +// VarDecl = "var" ExportedName Type . 
+func (p *gc_parser) parse_var_decl() (string, *ast.GenDecl) { + p.expect_keyword("var") + name := p.parse_exported_name() + typ := p.parse_type() + return name.X.(*ast.Ident).Name, &ast.GenDecl{ + Tok: token.VAR, + Specs: []ast.Spec{ + &ast.ValueSpec{ + Names: []*ast.Ident{name.Sel}, + Type: typ, + }, + }, + } +} + +// FuncBody = "{" ... "}" . +func (p *gc_parser) parse_func_body() { + p.expect('{') + for i := 1; i > 0; p.next() { + switch p.tok { + case '{': + i++ + case '}': + i-- + } + } +} + +// FuncDecl = "func" ExportedName Signature [ FuncBody ] . +func (p *gc_parser) parse_func_decl() (string, *ast.FuncDecl) { + // "func" was already consumed by lookahead + name := p.parse_exported_name() + typ := p.parse_signature() + if p.tok == '{' { + p.parse_func_body() + } + return name.X.(*ast.Ident).Name, &ast.FuncDecl{ + Name: name.Sel, + Type: typ, + } +} + +func strip_method_receiver(recv *ast.FieldList) string { + var sel *ast.SelectorExpr + + // find selector expression + typ := recv.List[0].Type + switch t := typ.(type) { + case *ast.StarExpr: + sel = t.X.(*ast.SelectorExpr) + case *ast.SelectorExpr: + sel = t + } + + // extract package path + pkg := sel.X.(*ast.Ident).Name + + // write back stripped type + switch t := typ.(type) { + case *ast.StarExpr: + t.X = sel.Sel + case *ast.SelectorExpr: + recv.List[0].Type = sel.Sel + } + + return pkg +} + +// MethodDecl = "func" Receiver Name Signature . +// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" [ FuncBody ] . +func (p *gc_parser) parse_method_decl() (string, *ast.FuncDecl) { + recv := p.parse_parameters() + pkg := strip_method_receiver(recv) + name, _ := p.parse_name() + typ := p.parse_signature() + if p.tok == '{' { + p.parse_func_body() + } + return pkg, &ast.FuncDecl{ + Recv: recv, + Name: ast.NewIdent(name), + Type: typ, + } +} + +// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . 
+func (p *gc_parser) parse_decl() (pkg string, decl ast.Decl) { + switch p.lit { + case "import": + p.parse_import_decl() + case "const": + pkg, decl = p.parse_const_decl() + case "type": + pkg, decl = p.parse_type_decl() + case "var": + pkg, decl = p.parse_var_decl() + case "func": + p.next() + if p.tok == '(' { + pkg, decl = p.parse_method_decl() + } else { + pkg, decl = p.parse_func_decl() + } + } + p.expect('\n') + return +} + +// Export = PackageClause { Decl } "$$" . +// PackageClause = "package" identifier [ "safe" ] "\n" . +func (p *gc_parser) parse_export(callback func(string, ast.Decl)) { + p.expect_keyword("package") + p.pfc.defalias = p.expect(scanner.Ident) + if p.tok != '\n' { + p.expect_keyword("safe") + } + p.expect('\n') + + for p.tok != '$' && p.tok != scanner.EOF { + pkg, decl := p.parse_decl() + if decl != nil { + callback(pkg, decl) + } + } +} diff --git a/langserver/internal/gocode/pre_go17.go b/langserver/internal/gocode/pre_go17.go new file mode 100644 index 00000000..d961a0c5 --- /dev/null +++ b/langserver/internal/gocode/pre_go17.go @@ -0,0 +1,7 @@ +// +build !go1.7,!go1.8 + +package gocode + +func init() { + knownPackageIdents["context"] = "golang.org/x/net/context" +} diff --git a/langserver/internal/gocode/ripper.go b/langserver/internal/gocode/ripper.go new file mode 100644 index 00000000..05310572 --- /dev/null +++ b/langserver/internal/gocode/ripper.go @@ -0,0 +1,141 @@ +package gocode + +import ( + "go/scanner" + "go/token" +) + +// All the code in this file serves single purpose: +// It separates a function with the cursor inside and the rest of the code. I'm +// doing that, because sometimes parser is not able to recover itself from an +// error and the autocompletion results become less complete. 
+ +type tok_pos_pair struct { + tok token.Token + pos token.Pos +} + +type tok_collection struct { + tokens []tok_pos_pair + fset *token.FileSet +} + +func (this *tok_collection) next(s *scanner.Scanner) bool { + pos, tok, _ := s.Scan() + if tok == token.EOF { + return false + } + + this.tokens = append(this.tokens, tok_pos_pair{tok, pos}) + return true +} + +func (this *tok_collection) find_decl_beg(pos int) int { + lowest := 0 + lowpos := -1 + lowi := -1 + cur := 0 + for i := pos; i >= 0; i-- { + t := this.tokens[i] + switch t.tok { + case token.RBRACE: + cur++ + case token.LBRACE: + cur-- + } + + if cur < lowest { + lowest = cur + lowpos = this.fset.Position(t.pos).Offset + lowi = i + } + } + + cur = lowest + for i := lowi - 1; i >= 0; i-- { + t := this.tokens[i] + switch t.tok { + case token.RBRACE: + cur++ + case token.LBRACE: + cur-- + } + if t.tok == token.SEMICOLON && cur == lowest { + lowpos = this.fset.Position(t.pos).Offset + break + } + } + + return lowpos +} + +func (this *tok_collection) find_decl_end(pos int) int { + highest := 0 + highpos := -1 + cur := 0 + + if this.tokens[pos].tok == token.LBRACE { + pos++ + } + + for i := pos; i < len(this.tokens); i++ { + t := this.tokens[i] + switch t.tok { + case token.RBRACE: + cur++ + case token.LBRACE: + cur-- + } + + if cur > highest { + highest = cur + highpos = this.fset.Position(t.pos).Offset + } + } + + return highpos +} + +func (this *tok_collection) find_outermost_scope(cursor int) (int, int) { + pos := 0 + + for i, t := range this.tokens { + if cursor <= this.fset.Position(t.pos).Offset { + break + } + pos = i + } + + return this.find_decl_beg(pos), this.find_decl_end(pos) +} + +// return new cursor position, file without ripped part and the ripped part itself +// variants: +// new-cursor, file-without-ripped-part, ripped-part +// old-cursor, file, nil +func (this *tok_collection) rip_off_decl(file []byte, cursor int) (int, []byte, []byte) { + this.fset = token.NewFileSet() + var s scanner.Scanner + 
s.Init(this.fset.AddFile("", this.fset.Base(), len(file)), file, nil, scanner.ScanComments) + for this.next(&s) { + } + + beg, end := this.find_outermost_scope(cursor) + if beg == -1 || end == -1 { + return cursor, file, nil + } + + ripped := make([]byte, end+1-beg) + copy(ripped, file[beg:end+1]) + + newfile := make([]byte, len(file)-len(ripped)) + copy(newfile, file[:beg]) + copy(newfile[beg:], file[end+1:]) + + return cursor - beg, newfile, ripped +} + +func rip_off_decl(file []byte, cursor int) (int, []byte, []byte) { + var tc tok_collection + return tc.rip_off_decl(file, cursor) +} diff --git a/langserver/internal/gocode/scope.go b/langserver/internal/gocode/scope.go new file mode 100644 index 00000000..14527148 --- /dev/null +++ b/langserver/internal/gocode/scope.go @@ -0,0 +1,77 @@ +package gocode + +//------------------------------------------------------------------------- +// scope +//------------------------------------------------------------------------- + +type scope struct { + // the package name that this scope resides in + pkgname string + parent *scope // nil for universe scope + entities map[string]*decl +} + +func new_named_scope(outer *scope, name string) *scope { + s := new_scope(outer) + s.pkgname = name + return s +} + +func new_scope(outer *scope) *scope { + s := new(scope) + if outer != nil { + s.pkgname = outer.pkgname + } + s.parent = outer + s.entities = make(map[string]*decl) + return s +} + +// returns: new, prev +func advance_scope(s *scope) (*scope, *scope) { + if len(s.entities) == 0 { + return s, s.parent + } + return new_scope(s), s +} + +// adds declaration or returns an existing one +func (s *scope) add_named_decl(d *decl) *decl { + return s.add_decl(d.name, d) +} + +func (s *scope) add_decl(name string, d *decl) *decl { + decl, ok := s.entities[name] + if !ok { + s.entities[name] = d + return d + } + return decl +} + +func (s *scope) replace_decl(name string, d *decl) { + s.entities[name] = d +} + +func (s *scope) merge_decl(d 
*decl) { + decl, ok := s.entities[d.name] + if !ok { + s.entities[d.name] = d + } else { + decl := decl.deep_copy() + decl.expand_or_replace(d) + s.entities[d.name] = decl + } +} + +func (s *scope) lookup(name string) *decl { + decl, ok := s.entities[name] + if !ok { + if s.parent != nil { + return s.parent.lookup(name) + } else { + return nil + } + } + return decl +} diff --git a/langserver/internal/gocode/server.go b/langserver/internal/gocode/server.go new file mode 100644 index 00000000..7bc2a7ca --- /dev/null +++ b/langserver/internal/gocode/server.go @@ -0,0 +1,243 @@ +package gocode + +import ( + "bytes" + "fmt" + "go/build" + "log" + "net" + "net/rpc" + "os" + "path/filepath" + "reflect" + "runtime" + "time" +) + +func do_server() int { + g_config.read() + if g_config.ForceDebugOutput != "" { + // forcefully enable debugging and redirect logging into the + // specified file + *g_debug = true + f, err := os.Create(g_config.ForceDebugOutput) + if err != nil { + panic(err) + } + log.SetOutput(f) + } + + addr := *g_addr + if *g_sock == "unix" { + addr = get_socket_filename() + if file_exists(addr) { + log.Printf("unix socket: '%s' already exists\n", addr) + return 1 + } + } + g_daemon = new_daemon(*g_sock, addr) + if *g_sock == "unix" { + // cleanup unix socket file + defer os.Remove(addr) + } + + rpc.Register(new(RPC)) + + g_daemon.loop() + return 0 +} + +//------------------------------------------------------------------------- +// daemon +//------------------------------------------------------------------------- + +type daemon struct { + listener net.Listener + cmd_in chan int + autocomplete *auto_complete_context + pkgcache package_cache + declcache *decl_cache + context package_lookup_context +} + +func new_daemon(network, address string) *daemon { + var err error + + d := new(daemon) + d.listener, err = net.Listen(network, address) + if err != nil { + panic(err) + } + + d.cmd_in = make(chan int, 1) + d.pkgcache = new_package_cache() + d.declcache = 
new_decl_cache(&d.context) + d.autocomplete = new_auto_complete_context(d.pkgcache, d.declcache) + return d +} + +func (this *daemon) drop_cache() { + this.pkgcache = new_package_cache() + this.declcache = new_decl_cache(&this.context) + this.autocomplete = new_auto_complete_context(this.pkgcache, this.declcache) +} + +const ( + daemon_close = iota +) + +func (this *daemon) loop() { + conn_in := make(chan net.Conn) + go func() { + for { + c, err := this.listener.Accept() + if err != nil { + panic(err) + } + conn_in <- c + } + }() + + timeout := time.Duration(g_config.CloseTimeout) * time.Second + countdown := time.NewTimer(timeout) + + for { + // handle connections or server CMDs (currently one CMD) + select { + case c := <-conn_in: + rpc.ServeConn(c) + countdown.Reset(timeout) + runtime.GC() + case cmd := <-this.cmd_in: + switch cmd { + case daemon_close: + return + } + case <-countdown.C: + return + } + } +} + +func (this *daemon) close() { + this.cmd_in <- daemon_close +} + +var g_daemon *daemon + +//------------------------------------------------------------------------- +// server_* functions +// +// Corresponding client_* functions are autogenerated by goremote. +//------------------------------------------------------------------------- + +func server_auto_complete(file []byte, filename string, cursor int, context_packed go_build_context) (c []candidate, d int) { + context := unpack_build_context(&context_packed) + defer func() { + if err := recover(); err != nil { + print_backtrace(err) + c = []candidate{ + {"PANIC", "PANIC", decl_invalid, "panic"}, + } + + // drop cache + g_daemon.drop_cache() + } + }() + // TODO: Probably we don't care about comparing all the fields, checking GOROOT and GOPATH + // should be enough. 
+ if !reflect.DeepEqual(g_daemon.context.Context, context.Context) { + g_daemon.context = context + g_daemon.drop_cache() + } + switch g_config.PackageLookupMode { + case "bzl": + // when package lookup mode is bzl, we set GOPATH to "" explicitly and + // BzlProjectRoot becomes valid (or empty) + var err error + g_daemon.context.GOPATH = "" + g_daemon.context.BzlProjectRoot, err = find_bzl_project_root(g_config.LibPath, filename) + if *g_debug && err != nil { + log.Printf("Bzl project root not found: %s", err) + } + case "gb": + // when package lookup mode is gb, we set GOPATH to "" explicitly and + // GBProjectRoot becomes valid (or empty) + var err error + g_daemon.context.GOPATH = "" + g_daemon.context.GBProjectRoot, err = find_gb_project_root(filename) + if *g_debug && err != nil { + log.Printf("Gb project root not found: %s", err) + } + case "go": + // get current package path for GO15VENDOREXPERIMENT hack + g_daemon.context.CurrentPackagePath = "" + pkg, err := g_daemon.context.ImportDir(filepath.Dir(filename), build.FindOnly) + if err == nil { + if *g_debug { + log.Printf("Go project path: %s", pkg.ImportPath) + } + g_daemon.context.CurrentPackagePath = pkg.ImportPath + } else if *g_debug { + log.Printf("Go project path not found: %s", err) + } + } + if *g_debug { + var buf bytes.Buffer + log.Printf("Got autocompletion request for '%s'\n", filename) + log.Printf("Cursor at: %d\n", cursor) + if cursor > len(file) || cursor < 0 { + log.Println("ERROR! Cursor is outside of the boundaries of the buffer, " + + "this is most likely a text editor plugin bug. 
Text editor is responsible " + + "for passing the correct cursor position to gocode.") + } else { + buf.WriteString("-------------------------------------------------------\n") + buf.Write(file[:cursor]) + buf.WriteString("#") + buf.Write(file[cursor:]) + log.Print(buf.String()) + log.Println("-------------------------------------------------------") + } + } + candidates, d := g_daemon.autocomplete.apropos(file, filename, cursor) + if *g_debug { + log.Printf("Offset: %d\n", d) + log.Printf("Number of candidates found: %d\n", len(candidates)) + log.Printf("Candidates are:\n") + for _, c := range candidates { + abbr := fmt.Sprintf("%s %s %s", c.Class, c.Name, c.Type) + if c.Class == decl_func { + abbr = fmt.Sprintf("%s %s%s", c.Class, c.Name, c.Type[len("func"):]) + } + log.Printf(" %s\n", abbr) + } + log.Println("=======================================================") + } + return candidates, d +} + +func server_close(notused int) int { + g_daemon.close() + return 0 +} + +func server_status(notused int) string { + return g_daemon.autocomplete.status() +} + +func server_drop_cache(notused int) int { + // drop cache + g_daemon.drop_cache() + return 0 +} + +func server_set(key, value string) string { + if key == "\x00" { + return g_config.list() + } else if value == "\x00" { + return g_config.list_option(key) + } + // drop cache on settings changes + g_daemon.drop_cache() + return g_config.set_option(key, value) +} diff --git a/langserver/internal/gocode/type_alias_build_hack_18.go b/langserver/internal/gocode/type_alias_build_hack_18.go new file mode 100644 index 00000000..945e6ba7 --- /dev/null +++ b/langserver/internal/gocode/type_alias_build_hack_18.go @@ -0,0 +1,18 @@ +// +build !go1.9,!go1.8.typealias + +package gocode + +import ( + "go/ast" +) + +func typeAliasSpec(name string, typ ast.Expr) *ast.TypeSpec { + return &ast.TypeSpec{ + Name: ast.NewIdent(name), + Type: typ, + } +} + +func isAliasTypeSpec(t *ast.TypeSpec) bool { + return false +} diff --git 
a/langserver/internal/gocode/type_alias_build_hack_19.go b/langserver/internal/gocode/type_alias_build_hack_19.go new file mode 100644 index 00000000..4fc034d2 --- /dev/null +++ b/langserver/internal/gocode/type_alias_build_hack_19.go @@ -0,0 +1,19 @@ +// +build go1.9 go1.8.typealias + +package gocode + +import ( + "go/ast" +) + +func typeAliasSpec(name string, typ ast.Expr) *ast.TypeSpec { + return &ast.TypeSpec{ + Name: ast.NewIdent(name), + Assign: 1, + Type: typ, + } +} + +func isAliasTypeSpec(t *ast.TypeSpec) bool { + return t.Assign != 0 +} diff --git a/langserver/internal/gocode/utils.go b/langserver/internal/gocode/utils.go new file mode 100644 index 00000000..afee81eb --- /dev/null +++ b/langserver/internal/gocode/utils.go @@ -0,0 +1,296 @@ +package gocode + +import ( + "bytes" + "fmt" + "go/build" + "io/ioutil" + "os" + "path/filepath" + "runtime" + "strings" + "sync" + "unicode/utf8" +) + +// our own readdir, which skips the files it cannot lstat +func readdir_lstat(name string) ([]os.FileInfo, error) { + f, err := os.Open(name) + if err != nil { + return nil, err + } + defer f.Close() + + names, err := f.Readdirnames(-1) + if err != nil { + return nil, err + } + + out := make([]os.FileInfo, 0, len(names)) + for _, lname := range names { + s, err := os.Lstat(filepath.Join(name, lname)) + if err != nil { + continue + } + out = append(out, s) + } + return out, nil +} + +// our other readdir function, only opens and reads +func readdir(dirname string) []os.FileInfo { + f, err := os.Open(dirname) + if err != nil { + return nil + } + fi, err := f.Readdir(-1) + f.Close() + if err != nil { + panic(err) + } + return fi +} + +// returns truncated 'data' and amount of bytes skipped (for cursor pos adjustment) +func filter_out_shebang(data []byte) ([]byte, int) { + if len(data) > 2 && data[0] == '#' && data[1] == '!' 
{ + newline := bytes.Index(data, []byte("\n")) + if newline != -1 && len(data) > newline+1 { + return data[newline+1:], newline + 1 + } + } + return data, 0 +} + +func file_exists(filename string) bool { + _, err := os.Stat(filename) + if err != nil { + return false + } + return true +} + +func is_dir(path string) bool { + fi, err := os.Stat(path) + return err == nil && fi.IsDir() +} + +func char_to_byte_offset(s []byte, offset_c int) (offset_b int) { + for offset_b = 0; offset_c > 0 && offset_b < len(s); offset_b++ { + if utf8.RuneStart(s[offset_b]) { + offset_c-- + } + } + return offset_b +} + +func xdg_home_dir() string { + xdghome := os.Getenv("XDG_CONFIG_HOME") + if xdghome == "" { + xdghome = filepath.Join(os.Getenv("HOME"), ".config") + } + return xdghome +} + +func has_prefix(s, prefix string, ignorecase bool) bool { + if ignorecase { + s = strings.ToLower(s) + prefix = strings.ToLower(prefix) + } + return strings.HasPrefix(s, prefix) +} + +func find_bzl_project_root(libpath, path string) (string, error) { + if libpath == "" { + return "", fmt.Errorf("could not find project root, libpath is empty") + } + + pathMap := map[string]struct{}{} + for _, lp := range strings.Split(libpath, ":") { + lp := strings.TrimSpace(lp) + pathMap[filepath.Clean(lp)] = struct{}{} + } + + path = filepath.Dir(path) + if path == "" { + return "", fmt.Errorf("project root is blank") + } + + start := path + for path != "/" { + if _, ok := pathMap[filepath.Clean(path)]; ok { + return path, nil + } + path = filepath.Dir(path) + } + return "", fmt.Errorf("could not find project root in %q or its parents", start) +} + +// Code taken directly from `gb`, I hope author doesn't mind. 
+func find_gb_project_root(path string) (string, error) { + path = filepath.Dir(path) + if path == "" { + return "", fmt.Errorf("project root is blank") + } + start := path + for path != "/" { + root := filepath.Join(path, "src") + if _, err := os.Stat(root); err != nil { + if os.IsNotExist(err) { + path = filepath.Dir(path) + continue + } + return "", err + } + path, err := filepath.EvalSymlinks(path) + if err != nil { + return "", err + } + return path, nil + } + return "", fmt.Errorf("could not find project root in %q or its parents", start) +} + +// vendorlessImportPath returns the devendorized version of the provided import path. +// e.g. "foo/bar/vendor/a/b" => "a/b" +func vendorlessImportPath(ipath string, currentPackagePath string) (string, bool) { + split := strings.Split(ipath, "vendor/") + // no vendor in path + if len(split) == 1 { + return ipath, true + } + // this import path does not belong to the current package + if currentPackagePath != "" && !strings.Contains(currentPackagePath, split[0]) { + return "", false + } + // Devendorize for use in import statement. 
+ if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { + return ipath[i+len("/vendor/"):], true + } + if strings.HasPrefix(ipath, "vendor/") { + return ipath[len("vendor/"):], true + } + return ipath, true +} + +//------------------------------------------------------------------------- +// print_backtrace +// +// a nicer backtrace printer than the default one +//------------------------------------------------------------------------- + +var g_backtrace_mutex sync.Mutex + +func print_backtrace(err interface{}) { + g_backtrace_mutex.Lock() + defer g_backtrace_mutex.Unlock() + fmt.Printf("panic: %v\n", err) + i := 2 + for { + pc, file, line, ok := runtime.Caller(i) + if !ok { + break + } + f := runtime.FuncForPC(pc) + fmt.Printf("%d(%s): %s:%d\n", i-1, f.Name(), file, line) + i++ + } + fmt.Println("") +} + +//------------------------------------------------------------------------- +// File reader goroutine +// +// It's a bad idea to block multiple goroutines on file I/O. Creates many +// threads which fight for HDD. Therefore only single goroutine should read HDD +// at the same time. 
+//------------------------------------------------------------------------- + +type file_read_request struct { + filename string + out chan file_read_response +} + +type file_read_response struct { + data []byte + error error +} + +type file_reader_type struct { + in chan file_read_request +} + +func new_file_reader() *file_reader_type { + this := new(file_reader_type) + this.in = make(chan file_read_request) + go func() { + var rsp file_read_response + for { + req := <-this.in + rsp.data, rsp.error = ioutil.ReadFile(req.filename) + req.out <- rsp + } + }() + return this +} + +func (this *file_reader_type) read_file(filename string) ([]byte, error) { + req := file_read_request{ + filename, + make(chan file_read_response), + } + this.in <- req + rsp := <-req.out + return rsp.data, rsp.error +} + +var file_reader = new_file_reader() + +//------------------------------------------------------------------------- +// copy of the build.Context without func fields +//------------------------------------------------------------------------- + +type go_build_context struct { + GOARCH string + GOOS string + GOROOT string + GOPATH string + CgoEnabled bool + UseAllFiles bool + Compiler string + BuildTags []string + ReleaseTags []string + InstallSuffix string +} + +func pack_build_context(ctx *build.Context) go_build_context { + return go_build_context{ + GOARCH: ctx.GOARCH, + GOOS: ctx.GOOS, + GOROOT: ctx.GOROOT, + GOPATH: ctx.GOPATH, + CgoEnabled: ctx.CgoEnabled, + UseAllFiles: ctx.UseAllFiles, + Compiler: ctx.Compiler, + BuildTags: ctx.BuildTags, + ReleaseTags: ctx.ReleaseTags, + InstallSuffix: ctx.InstallSuffix, + } +} + +func unpack_build_context(ctx *go_build_context) package_lookup_context { + return package_lookup_context{ + Context: build.Context{ + GOARCH: ctx.GOARCH, + GOOS: ctx.GOOS, + GOROOT: ctx.GOROOT, + GOPATH: ctx.GOPATH, + CgoEnabled: ctx.CgoEnabled, + UseAllFiles: ctx.UseAllFiles, + Compiler: ctx.Compiler, + BuildTags: ctx.BuildTags, + ReleaseTags: 
ctx.ReleaseTags, + InstallSuffix: ctx.InstallSuffix, + }, + } +} diff --git a/langserver/langserver_test.go b/langserver/langserver_test.go index f0a62b7c..ff30ef75 100644 --- a/langserver/langserver_test.go +++ b/langserver/langserver_test.go @@ -21,6 +21,7 @@ import ( "testing" "github.com/sourcegraph/ctxvfs" + "github.com/sourcegraph/go-langserver/langserver/internal/gocode" "github.com/sourcegraph/go-langserver/pkg/lsp" "github.com/sourcegraph/go-langserver/pkg/lspext" "github.com/sourcegraph/jsonrpc2" @@ -67,6 +68,10 @@ var serverTestCases = map[string]serverTestCase{ "b.go:1:17": "/src/test/pkg/b.go:1:17 id:test/pkg/-/B name:B package:test/pkg packageName:p recv: vendor:false", "b.go:1:23": "/src/test/pkg/a.go:1:17 id:test/pkg/-/A name:A package:test/pkg packageName:p recv: vendor:false", }, + wantCompletion: map[string]string{ + //"a.go:1:24": "1:23-1:24 A function func()", // returns empty list for unknown reason. Works if the two statements are in separate lines + "b.go:1:24": "1:23-1:24 A function func()", + }, wantReferences: map[string][]string{ "a.go:1:17": []string{ "/src/test/pkg/a.go:1:17", @@ -200,6 +205,11 @@ var serverTestCases = map[string]serverTestCase{ "a_test.go:1:16": "var X int", "a_test.go:1:20": "var A int", }, + wantCompletion: map[string]string{ + "x_test.go:1:45": "1:44-1:45 panic function func(interface{}), print function func(...interface{}), println function func(...interface{}), p module ", + "x_test.go:1:46": "1:46-1:46 A variable int", + "b_test.go:1:35": "1:34-1:35 X variable int", + }, wantSymbols: map[string][]string{ "y_test.go": []string{"/src/test/pkg/y_test.go:function:Y:1:22"}, "b_test.go": []string{"/src/test/pkg/b_test.go:function:Y:1:17"}, @@ -301,6 +311,10 @@ var serverTestCases = map[string]serverTestCase{ "d2/b.go:1:47": "/src/test/pkg/d/a.go:1:17 id:test/pkg/d/-/A name:A package:test/pkg/d packageName:d recv: vendor:false", "d2/b.go:1:52": "/src/test/pkg/d/d2/b.go:1:39 id:test/pkg/d/d2/-/B name:B 
package:test/pkg/d/d2 packageName:d2 recv: vendor:false", }, + wantCompletion: map[string]string{ + "d2/b.go:1:47": "1:47-1:47 A function func()", + //"d2/b.go:1:52": "1:52-1:52 d module , B function func()", // B not presented, see test case "go simple" + }, wantSymbols: map[string][]string{ "a.go": []string{"/src/test/pkg/d/a.go:function:A:1:17"}, "d2/b.go": []string{"/src/test/pkg/d/d2/b.go:function:B:1:39"}, @@ -443,6 +457,11 @@ package main; import "test/pkg"; func B() { p.A(); B() }`, wantXDefinition: map[string]string{ "a.go:1:40": "/goroot/src/fmt/print.go:1:19 id:fmt/-/Println name:Println package:fmt packageName:fmt recv: vendor:false", }, + wantCompletion: map[string]string{ + // use default GOROOT, since gocode needs package binaries + "a.go:1:21": "1:20-1:21 flag module , fmt module ", + "a.go:1:44": "1:38-1:44 Println function func(a ...interface{}) (n int, err error)", + }, wantSymbols: map[string][]string{ "a.go": []string{ "/src/test/pkg/a.go:variable:_:1:26", @@ -486,6 +505,10 @@ package main; import "test/pkg"; func B() { p.A(); B() }`, "a/a.go:1:17": "/src/test/pkg/a/a.go:1:17 id:test/pkg/a/-/A name:A package:test/pkg/a packageName:a recv: vendor:false", "b/b.go:1:43": "/src/test/pkg/a/a.go:1:17 id:test/pkg/a/-/A name:A package:test/pkg/a packageName:a recv: vendor:false", }, + wantCompletion: map[string]string{ + "b/b.go:1:26": "1:20-1:26 test/pkg/a module , test/pkg/b module ", + "b/b.go:1:43": "1:43-1:43 A function func()", + }, wantReferences: map[string][]string{ "a/a.go:1:17": []string{ "/src/test/pkg/a/a.go:1:17", @@ -533,6 +556,10 @@ package main; import "test/pkg"; func B() { p.A(); B() }`, wantXDefinition: map[string]string{ "a.go:1:61": "/src/test/pkg/vendor/github.com/v/vendored/v.go:1:24 id:test/pkg/vendor/github.com/v/vendored/-/V name:V package:test/pkg/vendor/github.com/v/vendored packageName:vendored recv: vendor:true", }, + wantCompletion: map[string]string{ + "a.go:1:34": "1:20-1:34 github.com/v/vendored module ", + 
"a.go:1:61": "1:61-1:61 V function func()", + }, wantReferences: map[string][]string{ "vendor/github.com/v/vendored/v.go:1:24": []string{ "/src/test/pkg/vendor/github.com/v/vendored/v.go:1:24", @@ -616,6 +643,10 @@ package main; import "test/pkg"; func B() { p.A(); B() }`, wantXDefinition: map[string]string{ "a.go:1:51": "/src/github.com/d/dep/d.go:1:19 id:github.com/d/dep/-/D name:D package:github.com/d/dep packageName:dep recv: vendor:false", }, + wantCompletion: map[string]string{ + "a.go:1:34": "1:20-1:34 github.com/d/dep module ", + "a.go:1:51": "1:51-1:51 D function func()", + }, wantReferences: map[string][]string{ "a.go:1:51": []string{ "/src/test/pkg/a.go:1:51", @@ -652,6 +683,9 @@ package main; import "test/pkg"; func B() { p.A(); B() }`, wantXDefinition: map[string]string{ "a.go:1:55": "/src/github.com/d/dep/vendor/vendp/vp.go:1:32 id:github.com/d/dep/vendor/vendp/-/V/F name:F package:github.com/d/dep/vendor/vendp packageName:vendp recv:V vendor:true", }, + wantCompletion: map[string]string{ + "a.go:1:55": "1:55-1:55 F variable int", + }, wantWorkspaceReferences: map[*lspext.WorkspaceReferencesParams][]string{ {Query: lspext.SymbolDescriptor{}}: []string{ "/src/test/pkg/a.go:1:19-1:37 -> id:github.com/d/dep name: package:github.com/d/dep packageName:dep recv: vendor:false", @@ -681,6 +715,10 @@ package main; import "test/pkg"; func B() { p.A(); B() }`, wantXDefinition: map[string]string{ "a.go:1:57": "/src/github.com/d/dep/subp/d.go:1:20 id:github.com/d/dep/subp/-/D name:D package:github.com/d/dep/subp packageName:subp recv: vendor:false", }, + wantCompletion: map[string]string{ + "a.go:1:34": "1:20-1:34 github.com/d/dep/subp module ", + "a.go:1:57": "1:57-1:57 D function func()", + }, wantWorkspaceReferences: map[*lspext.WorkspaceReferencesParams][]string{ {Query: lspext.SymbolDescriptor{}}: []string{ "/src/test/pkg/a.go:1:19-1:42 -> id:github.com/d/dep/subp name: package:github.com/d/dep/subp packageName:subp recv: vendor:false", @@ -719,6 +757,10 @@ 
package main; import "test/pkg"; func B() { p.A(); B() }`, "a.go:1:53": "/src/github.com/d/dep1/d1.go:1:48 id:github.com/d/dep1/-/D1 name:D1 package:github.com/d/dep1 packageName:dep1 recv: vendor:false", "a.go:1:58": "/src/github.com/d/dep2/d2.go:1:32 id:github.com/d/dep2/-/D2/D2 name:D2 package:github.com/d/dep2 packageName:dep2 recv:D2 vendor:false", }, + wantCompletion: map[string]string{ + //"a.go:1:53": "1:53-1:53 D1 function func() D2", // gocode does not handle D2 correctly + "a.go:1:58": "1:58-1:58 D2 variable int", + }, wantWorkspaceReferences: map[*lspext.WorkspaceReferencesParams][]string{ {Query: lspext.SymbolDescriptor{}}: []string{ "/src/test/pkg/a.go:1:19-1:38 -> id:github.com/d/dep1 name: package:github.com/d/dep1 packageName:dep1 recv: vendor:false", @@ -883,16 +925,16 @@ type Header struct { "a.go": `package p // Comments for A - func A(foo int, bar func(baz int) int) int { - return bar(foo) + func A(foo int, bar func(baz int) int) int { + return bar(foo) } - + func B() {} // Comments for C - func C(x int, y int) int { - return x+y + func C(x int, y int) int { + return x+y }`, "b.go": "package p; func main() { B(); A(); A(0,); A(0); C(1,2) }", }, @@ -908,6 +950,31 @@ type Header struct { }, }, }, + "completion": { + rootURI: "file:///src/test/pkg", + fs: map[string]string{ + "a.go": `package p + +import "strings" + +func s2() { + _ = strings.Title("s") + _ = new(strings.Replacer) +} + +const s1 = 42 + +var s3 int +var s4 func()`, + }, + cases: lspTestCases{ + wantCompletion: map[string]string{ + "a.go:6:7": "6:6-6:7 s1 constant , s2 function func(), strings module , string class built-in, s3 variable int, s4 variable func()", + "a.go:7:7": "7:6-7:7 nil constant , new function func(type) *type", + "a.go:12:11": "12:8-12:11 int class built-in, int16 class built-in, int32 class built-in, int64 class built-in, int8 class built-in", + }, + }, + }, "unexpected paths": { // notice the : and @ symbol rootURI: "file:///src/t:est/@hello/pkg", @@ -932,6 
+999,8 @@ type Header struct { } func TestServer(t *testing.T) { + GocodeCompletionEnabled = true + for label, test := range serverTestCases { t.Run(label, func(t *testing.T) { h := &LangHandler{HandlerShared: &HandlerShared{}} @@ -949,8 +1018,13 @@ func TestServer(t *testing.T) { // Prepare the connection. ctx := context.Background() + tdCap := lsp.TextDocumentClientCapabilities{} + tdCap.Completion.CompletionItemKind.ValueSet = []lsp.CompletionItemKind{lsp.CIKConstant} if err := conn.Call(ctx, "initialize", InitializeParams{ - InitializeParams: lsp.InitializeParams{RootURI: test.rootURI}, + InitializeParams: lsp.InitializeParams{ + RootURI: test.rootURI, + Capabilities: lsp.ClientCapabilities{TextDocument: tdCap}, + }, NoOSFileSystemAccess: true, RootImportPath: strings.TrimPrefix(rootFSPath, "/src/"), BuildContext: &InitializeBuildContextParams{ @@ -1024,6 +1098,7 @@ type lspTestCases struct { wantHover, overrideGodefHover map[string]string wantDefinition, overrideGodefDefinition map[string]string wantXDefinition map[string]string + wantCompletion map[string]string wantReferences map[string][]string wantSymbols map[string][]string wantWorkspaceSymbols map[*lspext.WorkspaceSymbolParams][]string @@ -1094,7 +1169,7 @@ func lspTests(t testing.TB, ctx context.Context, fs *AtomicFS, c *jsonrpc2.Conn, wantGodefHover = cases.wantHover } - if len(wantGodefDefinition) > 0 || (len(wantGodefHover) > 0 && fs != nil) { + if len(wantGodefDefinition) > 0 || (len(wantGodefHover) > 0 && fs != nil) || len(cases.wantCompletion) > 0 { UseBinaryPkgCache = true // Copy the VFS into a temp directory, which will be our $GOPATH. @@ -1112,6 +1187,7 @@ func lspTests(t testing.TB, ctx context.Context, fs *AtomicFS, c *jsonrpc2.Conn, // look for $GOPATH/pkg .a files inside the $GOPATH that was set during // 'go test' instead of our tmp directory. 
build.Default.GOPATH = tmpDir + gocode.SetBuildContext(&build.Default) tmpRootPath := filepath.Join(tmpDir, uriToFilePath(rootURI)) // Install all Go packages in the $GOPATH. @@ -1142,6 +1218,11 @@ func lspTests(t testing.TB, ctx context.Context, fs *AtomicFS, c *jsonrpc2.Conn, hoverTest(t, ctx, c, pathToURI(tmpRootPath), pos, want) }) } + for pos, want := range cases.wantCompletion { + tbRun(t, fmt.Sprintf("completion-%s", strings.Replace(pos, "/", "-", -1)), func(t testing.TB) { + completionTest(t, ctx, c, pathToURI(tmpRootPath), pos, want) + }) + } UseBinaryPkgCache = false } @@ -1264,6 +1345,20 @@ func xdefinitionTest(t testing.TB, ctx context.Context, c *jsonrpc2.Conn, rootUR } } +func completionTest(t testing.TB, ctx context.Context, c *jsonrpc2.Conn, rootURI lsp.DocumentURI, pos, want string) { + file, line, char, err := parsePos(pos) + if err != nil { + t.Fatal(err) + } + completion, err := callCompletion(ctx, c, uriJoin(rootURI, file), line, char) + if err != nil { + t.Fatal(err) + } + if completion != want { + t.Fatalf("got %q, want %q", completion, want) + } +} + func referencesTest(t testing.TB, ctx context.Context, c *jsonrpc2.Conn, rootURI lsp.DocumentURI, pos string, want []string) { file, line, char, err := parsePos(pos) if err != nil { @@ -1442,6 +1537,28 @@ func callXDefinition(ctx context.Context, c *jsonrpc2.Conn, uri lsp.DocumentURI, return str, nil } +func callCompletion(ctx context.Context, c *jsonrpc2.Conn, uri lsp.DocumentURI, line, char int) (string, error) { + var res lsp.CompletionList + err := c.Call(ctx, "textDocument/completion", lsp.CompletionParams{TextDocumentPositionParams: lsp.TextDocumentPositionParams{ + TextDocument: lsp.TextDocumentIdentifier{URI: uri}, + Position: lsp.Position{Line: line, Character: char}, + }}, &res) + if err != nil { + return "", err + } + var str string + for i, it := range res.Items { + if i != 0 { + str += ", " + } else { + e := it.TextEdit.Range + str += fmt.Sprintf("%d:%d-%d:%d ", e.Start.Line+1, 
e.Start.Character+1, e.End.Line+1, e.End.Character+1) + } + str += fmt.Sprintf("%s %s %s", it.Label, it.Kind, it.Detail) + } + return str, nil +} + func callReferences(ctx context.Context, c *jsonrpc2.Conn, uri lsp.DocumentURI, line, char int) ([]string, error) { var res locations err := c.Call(ctx, "textDocument/references", lsp.ReferenceParams{ diff --git a/main.go b/main.go index f727bdf1..3b1acc68 100644 --- a/main.go +++ b/main.go @@ -29,6 +29,7 @@ var ( freeosmemory = flag.Bool("freeosmemory", true, "aggressively free memory back to the OS") usebinarypkgcache = flag.Bool("usebinarypkgcache", true, "use $GOPATH/pkg binary .a files (improves performance)") maxparallelism = flag.Int("maxparallelism", -1, "use at max N parallel goroutines to fulfill requests") + gocodecompletion = flag.Bool("gocodecompletion", false, "enable completion (extra memory burden)") ) // version is the version field we report back. If you are releasing a new version: @@ -63,6 +64,8 @@ func main() { } langserver.MaxParallelism = *maxparallelism + langserver.GocodeCompletionEnabled = *gocodecompletion + if err := run(); err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) diff --git a/pkg/lsp/service.go b/pkg/lsp/service.go index 8b8ef65c..d8926e80 100644 --- a/pkg/lsp/service.go +++ b/pkg/lsp/service.go @@ -30,6 +30,10 @@ func (p *InitializeParams) Root() DocumentURI { type DocumentURI string type ClientCapabilities struct { + Workspace WorkspaceClientCapabilities `json:"workspace,omitempty"` + TextDocument TextDocumentClientCapabilities `json:"textDocument,omitempty"` + Experimental interface{} `json:"experimental,omitempty"` + // Below are Sourcegraph extensions. 
They do not live in lspext since // they are extending the field InitializeParams.Capabilities @@ -46,6 +50,16 @@ type ClientCapabilities struct { XCacheProvider bool `json:"xcacheProvider,omitempty"` } +type WorkspaceClientCapabilities struct{} + +type TextDocumentClientCapabilities struct { + Completion struct { + CompletionItemKind struct { + ValueSet []CompletionItemKind `json:"valueSet,omitempty"` + } `json:"completionItemKind,omitempty"` + } `json:"completion,omitempty"` +} + type InitializeResult struct { Capabilities ServerCapabilities `json:"capabilities,omitempty"` } @@ -173,36 +187,76 @@ type SignatureHelpOptions struct { type CompletionItemKind int const ( - CIKText CompletionItemKind = 1 - CIKMethod = 2 - CIKFunction = 3 - CIKConstructor = 4 - CIKField = 5 - CIKVariable = 6 - CIKClass = 7 - CIKInterface = 8 - CIKModule = 9 - CIKProperty = 10 - CIKUnit = 11 - CIKValue = 12 - CIKEnum = 13 - CIKKeyword = 14 - CIKSnippet = 15 - CIKColor = 16 - CIKFile = 17 - CIKReference = 18 + _ CompletionItemKind = iota + CIKText + CIKMethod + CIKFunction + CIKConstructor + CIKField + CIKVariable + CIKClass + CIKInterface + CIKModule + CIKProperty + CIKUnit + CIKValue + CIKEnum + CIKKeyword + CIKSnippet + CIKColor + CIKFile + CIKReference + CIKFolder + CIKEnumMember + CIKConstant + CIKStruct + CIKEvent + CIKOperator + CIKTypeParameter ) +func (c CompletionItemKind) String() string { + return completionItemKindName[c] +} + +var completionItemKindName = map[CompletionItemKind]string{ + CIKText: "text", + CIKMethod: "method", + CIKFunction: "function", + CIKConstructor: "constructor", + CIKField: "field", + CIKVariable: "variable", + CIKClass: "class", + CIKInterface: "interface", + CIKModule: "module", + CIKProperty: "property", + CIKUnit: "unit", + CIKValue: "value", + CIKEnum: "enum", + CIKKeyword: "keyword", + CIKSnippet: "snippet", + CIKColor: "color", + CIKFile: "file", + CIKReference: "reference", + CIKFolder: "folder", + CIKEnumMember: "enumMember", + CIKConstant: 
"constant", + CIKStruct: "struct", + CIKEvent: "event", + CIKOperator: "operator", + CIKTypeParameter: "typeParameter", +} + type CompletionItem struct { - Label string `json:"label"` - Kind int `json:"kind,omitempty"` - Detail string `json:"detail,omitempty"` - Documentation string `json:"documentation,omitempty"` - SortText string `json:"sortText,omitempty"` - FilterText string `json:"filterText,omitempty"` - InsertText string `json:"insertText,omitempty"` - TextEdit TextEdit `json:"textEdit,omitempty"` - Data interface{} `json:"data,omitempty"` + Label string `json:"label"` + Kind CompletionItemKind `json:"kind,omitempty"` + Detail string `json:"detail,omitempty"` + Documentation string `json:"documentation,omitempty"` + SortText string `json:"sortText,omitempty"` + FilterText string `json:"filterText,omitempty"` + InsertText string `json:"insertText,omitempty"` + TextEdit *TextEdit `json:"textEdit,omitempty"` + Data interface{} `json:"data,omitempty"` } type CompletionList struct { @@ -210,6 +264,23 @@ type CompletionList struct { Items []CompletionItem `json:"items"` } +type CompletionTriggerKind int + +const ( + CTKInvoked CompletionTriggerKind = 1 + CTKTriggerCharacter = 2 +) + +type CompletionContext struct { + TriggerKind CompletionTriggerKind `json:"triggerKind"` + TriggerCharacter string `json:"triggerCharacter,omitempty"` +} + +type CompletionParams struct { + TextDocumentPositionParams + Context CompletionContext `json:"context,omitempty"` +} + type Hover struct { Contents []MarkedString `json:"contents,omitempty"` Range *Range `json:"range,omitempty"`