From 05eaa109dfcbc32c1e96226012de1e40847d2542 Mon Sep 17 00:00:00 2001
From: Radek Simko
Date: Mon, 23 Nov 2020 08:10:14 +0000
Subject: [PATCH] Implement textDocument/semanticTokens (semantic highlighting)

---
 go.mod                                        |   2 +-
 go.sum                                        |   5 +-
 internal/langserver/handlers/handlers_test.go |   7 +
 internal/langserver/handlers/initialize.go    |  24 +-
 .../langserver/handlers/semantic_tokens.go    |  64 ++++++
 .../handlers/semantic_tokens_test.go          | 121 ++++++++++
 internal/langserver/handlers/service.go       |  12 +
 internal/lsp/token_encoder.go                 | 134 ++++++++++++
 internal/lsp/token_encoder_test.go            | 207 ++++++++++++++++++
 internal/lsp/token_types.go                   | 148 +++++++++++++
 10 files changed, 719 insertions(+), 5 deletions(-)
 create mode 100644 internal/langserver/handlers/semantic_tokens.go
 create mode 100644 internal/langserver/handlers/semantic_tokens_test.go
 create mode 100644 internal/lsp/token_encoder.go
 create mode 100644 internal/lsp/token_encoder_test.go
 create mode 100644 internal/lsp/token_types.go

diff --git a/go.mod b/go.mod
index 5801bd79a..5b6e5c230 100644
--- a/go.mod
+++ b/go.mod
@@ -11,7 +11,7 @@ require (
 	github.com/google/uuid v1.1.2
 	github.com/hashicorp/go-multierror v1.1.0
 	github.com/hashicorp/go-version v1.2.1
-	github.com/hashicorp/hcl-lang v0.0.0-20201116081236-948e43712a65
+	github.com/hashicorp/hcl-lang v0.0.0-20201207122824-8cd7a941aa8f
 	github.com/hashicorp/hcl/v2 v2.6.0
 	github.com/hashicorp/terraform-exec v0.11.1-0.20201207223938-9186a7c3bb24
 	github.com/hashicorp/terraform-json v0.7.0
diff --git a/go.sum b/go.sum
index 2d6ad92a1..7fad5c7ac 100644
--- a/go.sum
+++ b/go.sum
@@ -185,10 +185,9 @@ github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f h1:UdxlrJz4JOnY8W+Db
 github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w=
 github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
-github.com/hashicorp/hcl-lang v0.0.0-20201110071249-4e412924f52b h1:EjnMRaTQlomBMNRQfyWoLEg9IdqxeN1R2mb3ZZetCBs=
 github.com/hashicorp/hcl-lang v0.0.0-20201110071249-4e412924f52b/go.mod h1:vd3BPEDWrYMAgAnB0MRlBdZknrpUXf8Jk2PNaHIbwhg=
-github.com/hashicorp/hcl-lang v0.0.0-20201116081236-948e43712a65 h1:kF6Dxt2kPNj8+Px7LyK7nxPDQjYKwGrKxxYnSu+LOXM=
-github.com/hashicorp/hcl-lang v0.0.0-20201116081236-948e43712a65/go.mod h1:vd3BPEDWrYMAgAnB0MRlBdZknrpUXf8Jk2PNaHIbwhg=
+github.com/hashicorp/hcl-lang v0.0.0-20201207122824-8cd7a941aa8f h1:XppSUhj2DLqAkl/TnlowKXdzC4geEEZi5wtgpKQDU8o=
+github.com/hashicorp/hcl-lang v0.0.0-20201207122824-8cd7a941aa8f/go.mod h1:TZ5tpvmgJSHfmIndN4WP9SpZvyWK8tHPBY8LDRyU+pI=
 github.com/hashicorp/hcl/v2 v2.0.0/go.mod h1:oVVDG71tEinNGYCxinCYadcmKU9bglqW9pV3txagJ90=
 github.com/hashicorp/hcl/v2 v2.6.0 h1:3krZOfGY6SziUXa6H9PJU6TyohHn7I+ARYnhbeNBz+o=
 github.com/hashicorp/hcl/v2 v2.6.0/go.mod h1:bQTN5mpo+jewjJgh8jr0JUguIi7qPHUF6yIfAEN3jqY=
diff --git a/internal/langserver/handlers/handlers_test.go b/internal/langserver/handlers/handlers_test.go
index c438b72aa..41198d35e 100644
--- a/internal/langserver/handlers/handlers_test.go
+++ b/internal/langserver/handlers/handlers_test.go
@@ -47,6 +47,13 @@ func initializeResponse(t *testing.T, commandPrefix string) string {
 			"commands": %s,
 			"workDoneProgress":true
 		},
+		"semanticTokensProvider": {
+			"legend": {
+				"tokenTypes": [],
+				"tokenModifiers": []
+			},
+			"full": false
+		},
 		"workspace": {
 			"workspaceFolders": {}
 		}
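The fixture above expects an empty legend and `"full": false` because the mocked client in these tests declares no semantic-token capabilities: the advertised legend is the intersection of server-supported and client-declared names (see `TokenTypesLegend` in `token_types.go` further down). A minimal sketch of that behaviour, written as a hypothetical example test in the `lsp` package (not part of this patch):

```go
package lsp

import "fmt"

func ExampleTokenTypesLegend() {
	// No client-declared token types -> empty legend,
	// which is what the updated fixture above expects.
	fmt.Println(TokenTypesLegend(nil).AsStrings())

	// A client declaring support narrows the legend to the
	// intersection, preserving the server-side order.
	fmt.Println(TokenTypesLegend([]string{"string", "type"}).AsStrings())

	// Output:
	// []
	// [type string]
}
```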
diff --git a/internal/langserver/handlers/initialize.go b/internal/langserver/handlers/initialize.go
index 953184d94..b0959be45 100644
--- a/internal/langserver/handlers/initialize.go
+++ b/internal/langserver/handlers/initialize.go
@@ -51,7 +51,8 @@ func (lh *logHandler) Initialize(ctx context.Context, params lsp.InitializeParam
 		return serverCaps, err
 	}
 
-	err = lsctx.SetClientCapabilities(ctx, &params.Capabilities)
+	clientCaps := params.Capabilities
+	err = lsctx.SetClientCapabilities(ctx, &clientCaps)
 	if err != nil {
 		return serverCaps, err
 	}
@@ -80,6 +81,27 @@ func (lh *logHandler) Initialize(ctx context.Context, params lsp.InitializeParam
 		return serverCaps, err
 	}
 
+	stCaps := clientCaps.TextDocument.SemanticTokens
+	semanticTokensOpts := lsp.SemanticTokensOptions{
+		Legend: lsp.SemanticTokensLegend{
+			TokenTypes:     ilsp.TokenTypesLegend(stCaps.TokenTypes).AsStrings(),
+			TokenModifiers: ilsp.TokenModifiersLegend(stCaps.TokenModifiers).AsStrings(),
+		},
+	}
+	type semanticTokensFull struct {
+		Delta bool `json:"delta,omitempty"`
+	}
+	switch fullSupported := stCaps.Requests.Full.(type) {
+	case bool:
+		semanticTokensOpts.Full = fullSupported
+	case nil:
+		semanticTokensOpts.Full = false
+	case semanticTokensFull:
+		semanticTokensOpts.Full = true
+	}
+
+	serverCaps.Capabilities.SemanticTokensProvider = semanticTokensOpts
+
 	// set commandPrefix for session
 	lsctx.SetCommandPrefix(ctx, out.Options.CommandPrefix)
 	// apply prefix to executeCommand handler names
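For context on the type switch above: LSP 3.16 allows a client to advertise `requests.full` either as a plain boolean or as an object such as `{"delta": true}`. A standalone sketch (not part of this patch) of how `encoding/json` surfaces those two shapes when decoding into an `interface{}` field:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	for _, raw := range []string{`{"full": true}`, `{"full": {"delta": true}}`, `{}`} {
		var req struct {
			Full interface{} `json:"full"`
		}
		if err := json.Unmarshal([]byte(raw), &req); err != nil {
			panic(err)
		}
		// encoding/json decodes JSON booleans as bool and
		// JSON objects as map[string]interface{}.
		switch v := req.Full.(type) {
		case bool:
			fmt.Println("full:", v)
		case map[string]interface{}:
			fmt.Println("full (object), delta:", v["delta"])
		case nil:
			fmt.Println("full: not advertised")
		}
	}
}
```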
"github.com/hashicorp/terraform-ls/internal/terraform/exec" + "github.com/hashicorp/terraform-ls/internal/terraform/rootmodule" + "github.com/stretchr/testify/mock" +) + +func TestSemanticTokensFull(t *testing.T) { + tmpDir := TempDir(t) + InitPluginCache(t, tmpDir.Dir()) + + var testSchema tfjson.ProviderSchemas + err := json.Unmarshal([]byte(testSchemaOutput), &testSchema) + if err != nil { + t.Fatal(err) + } + + ls := langserver.NewLangServerMock(t, NewMockSession(&MockSessionInput{ + RootModules: map[string]*rootmodule.RootModuleMock{ + tmpDir.Dir(): { + TfExecFactory: exec.NewMockExecutor([]*mock.Call{ + { + Method: "Version", + Repeatability: 1, + Arguments: []interface{}{ + mock.AnythingOfType(""), + }, + ReturnArguments: []interface{}{ + version.Must(version.NewVersion("0.12.0")), + nil, + nil, + }, + }, + { + Method: "GetExecPath", + Repeatability: 1, + ReturnArguments: []interface{}{ + "", + }, + }, + { + Method: "ProviderSchemas", + Repeatability: 1, + Arguments: []interface{}{ + mock.AnythingOfType(""), + }, + ReturnArguments: []interface{}{ + &testSchema, + nil, + }, + }, + }), + }, + }})) + stop := ls.Start(t) + defer stop() + + ls.Call(t, &langserver.CallRequest{ + Method: "initialize", + ReqParams: fmt.Sprintf(`{ + "capabilities": { + "textDocument": { + "semanticTokens": { + "tokenTypes": [ + "type", + "property", + "string" + ], + "tokenModifiers": [ + "deprecated", + "modification" + ], + "requests": { + "full": true + } + } + } + }, + "rootUri": %q, + "processId": 12345 + }`, TempDir(t).URI())}) + ls.Notify(t, &langserver.CallRequest{ + Method: "initialized", + ReqParams: "{}", + }) + ls.Call(t, &langserver.CallRequest{ + Method: "textDocument/didOpen", + ReqParams: fmt.Sprintf(`{ + "textDocument": { + "version": 0, + "languageId": "terraform", + "text": "provider \"test\" {\n\n}\n", + "uri": "%s/main.tf" + } + }`, TempDir(t).URI())}) + + ls.CallAndExpectResponse(t, &langserver.CallRequest{ + Method: "textDocument/semanticTokens/full", + ReqParams: fmt.Sprintf(`{ + "textDocument": { + "uri": "%s/main.tf" + } + }`, TempDir(t).URI())}, `{ + "jsonrpc": "2.0", + "id": 3, + "result": { + "data": [ + 0,0,8,0,0, + 0,9,6,1,2 + ] + } + }`) +} diff --git a/internal/langserver/handlers/service.go b/internal/langserver/handlers/service.go index 4f043dbce..7d9ad0de9 100644 --- a/internal/langserver/handlers/service.go +++ b/internal/langserver/handlers/service.go @@ -260,6 +260,18 @@ func (svc *service) Assigner() (jrpc2.Assigner, error) { return handle(ctx, req, lh.TextDocumentFormatting) }, + "textDocument/semanticTokens/full": func(ctx context.Context, req *jrpc2.Request) (interface{}, error) { + err := session.CheckInitializationIsConfirmed() + if err != nil { + return nil, err + } + + ctx = lsctx.WithDocumentStorage(ctx, svc.fs) + ctx = lsctx.WithClientCapabilities(ctx, cc) + ctx = lsctx.WithRootModuleFinder(ctx, svc.modMgr) + + return handle(ctx, req, lh.TextDocumentSemanticTokensFull) + }, "workspace/executeCommand": func(ctx context.Context, req *jrpc2.Request) (interface{}, error) { err := session.CheckInitializationIsConfirmed() if err != nil { diff --git a/internal/lsp/token_encoder.go b/internal/lsp/token_encoder.go new file mode 100644 index 000000000..d3c52ed28 --- /dev/null +++ b/internal/lsp/token_encoder.go @@ -0,0 +1,134 @@ +package lsp + +import ( + "bytes" + + "github.com/hashicorp/hcl-lang/lang" + lsp "github.com/hashicorp/terraform-ls/internal/protocol" + "github.com/hashicorp/terraform-ls/internal/source" +) + +type TokenEncoder struct { + Lines source.Lines + 
diff --git a/internal/lsp/token_encoder.go b/internal/lsp/token_encoder.go
new file mode 100644
index 000000000..d3c52ed28
--- /dev/null
+++ b/internal/lsp/token_encoder.go
@@ -0,0 +1,134 @@
+package lsp
+
+import (
+	"bytes"
+
+	"github.com/hashicorp/hcl-lang/lang"
+	lsp "github.com/hashicorp/terraform-ls/internal/protocol"
+	"github.com/hashicorp/terraform-ls/internal/source"
+)
+
+type TokenEncoder struct {
+	Lines      source.Lines
+	Tokens     []lang.SemanticToken
+	ClientCaps lsp.SemanticTokensClientCapabilities
+}
+
+func (te *TokenEncoder) Encode() []float64 {
+	data := make([]float64, 0)
+
+	for i := range te.Tokens {
+		data = append(data, te.encodeTokenOfIndex(i)...)
+	}
+
+	return data
+}
+
+func (te *TokenEncoder) encodeTokenOfIndex(i int) []float64 {
+	token := te.Tokens[i]
+
+	var tokenType TokenType
+	modifiers := make([]TokenModifier, 0)
+
+	switch token.Type {
+	case lang.TokenBlockType:
+		tokenType = TokenTypeType
+	case lang.TokenBlockLabel:
+		tokenType = TokenTypeString
+	case lang.TokenAttrName:
+		tokenType = TokenTypeProperty
+	default:
+		return []float64{}
+	}
+
+	if !te.tokenTypeSupported(tokenType) {
+		return []float64{}
+	}
+
+	tokenTypeIdx := TokenTypesLegend(te.ClientCaps.TokenTypes).Index(tokenType)
+
+	for _, m := range token.Modifiers {
+		switch m {
+		case lang.TokenModifierDependent:
+			if !te.tokenModifierSupported(TokenModifierModification) {
+				return []float64{}
+			}
+			modifiers = append(modifiers, TokenModifierModification)
+		case lang.TokenModifierDeprecated:
+			if !te.tokenModifierSupported(TokenModifierDeprecated) {
+				return []float64{}
+			}
+			modifiers = append(modifiers, TokenModifierDeprecated)
+		}
+	}
+
+	modifierBitMask := TokenModifiersLegend(te.ClientCaps.TokenModifiers).BitMask(modifiers)
+
+	data := make([]float64, 0)
+
+	// Client may not support multiline tokens which would be indicated
+	// via lsp.SemanticTokensCapabilities.MultilineTokenSupport
+	// once it becomes available in gopls LSP structs.
+	//
+	// For now we just safely assume client does *not* support it.
+
+	tokenLineDelta := token.Range.End.Line - token.Range.Start.Line
+
+	previousLine := 0
+	previousStartChar := 0
+	if i > 0 {
+		previousLine = te.Tokens[i-1].Range.End.Line - 1
+		previousStartChar = te.Tokens[i-1].Range.Start.Column - 1
+	}
+
+	if tokenLineDelta == 0 || false /* te.clientCaps.MultilineTokenSupport */ {
+		deltaLine := token.Range.Start.Line - 1 - previousLine
+		deltaStartChar := token.Range.Start.Column - 1
+		tokenLength := token.Range.End.Byte - token.Range.Start.Byte
+
+		data = append(data, []float64{
+			float64(deltaLine),
+			float64(deltaStartChar),
+			float64(tokenLength),
+			float64(tokenTypeIdx),
+			float64(modifierBitMask),
+		}...)
+	} else {
+		// Add entry for each line of a multiline token
+		for tokenLine := token.Range.Start.Line - 1; tokenLine <= token.Range.End.Line-1; tokenLine++ {
+			deltaLine := tokenLine - previousLine
+
+			deltaStartChar := 0
+			if tokenLine == token.Range.Start.Line-1 {
+				deltaStartChar = token.Range.Start.Column - 1 - previousStartChar
+			}
+
+			lineBytes := bytes.TrimRight(te.Lines[tokenLine].Bytes(), "\n\r")
+			length := len(lineBytes)
+
+			if tokenLine == token.Range.End.Line-1 {
+				length = token.Range.End.Column - 1
+			}
+
+			data = append(data, []float64{
+				float64(deltaLine),
+				float64(deltaStartChar),
+				float64(length),
+				float64(tokenTypeIdx),
+				float64(modifierBitMask),
+			}...)
+
+			previousLine = tokenLine
+		}
+	}
+
+	return data
+}
+
+func (te *TokenEncoder) tokenTypeSupported(tokenType TokenType) bool {
+	return sliceContains(te.ClientCaps.TokenTypes, string(tokenType))
+}
+
+func (te *TokenEncoder) tokenModifierSupported(tokenModifier TokenModifier) bool {
+	return sliceContains(te.ClientCaps.TokenModifiers, string(tokenModifier))
+}
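To illustrate the relative encoding `Encode` produces: each token becomes a 5-tuple `(deltaLine, deltaStartChar, length, tokenType, tokenModifiers)`, where, per the LSP spec, the line is relative to the previous token and the start character is relative to the previous token's start when both share a line. A minimal client-side decoding sketch (hypothetical, not part of this patch; the first two tuples mirror the handler test above, the third is made up):

```go
package main

import "fmt"

func main() {
	// (deltaLine, deltaStartChar, length, typeIdx, modifierMask) per token
	data := []float64{
		0, 0, 8, 0, 0, // first token: line 0, char 0
		0, 9, 6, 1, 2, // same line: start char is relative to the previous start
		2, 1, 3, 2, 0, // two lines down: start char is absolute again
	}

	line, char := 0, 0
	for i := 0; i+4 < len(data); i += 5 {
		deltaLine := int(data[i])
		line += deltaLine
		if deltaLine == 0 {
			char += int(data[i+1])
		} else {
			char = int(data[i+1])
		}
		fmt.Printf("line %d, char %d, len %d, type %d, modifiers %b\n",
			line, char, int(data[i+2]), int(data[i+3]), int(data[i+4]))
	}
}
```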
diff --git a/internal/lsp/token_encoder_test.go b/internal/lsp/token_encoder_test.go
new file mode 100644
index 000000000..34d5c86f9
--- /dev/null
+++ b/internal/lsp/token_encoder_test.go
@@ -0,0 +1,207 @@
+package lsp
+
+import (
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+	"github.com/hashicorp/hcl-lang/lang"
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/terraform-ls/internal/protocol"
+	"github.com/hashicorp/terraform-ls/internal/source"
+)
+
+func TestTokenEncoder_singleLineTokens(t *testing.T) {
+	bytes := []byte(`myblock "mytype" {
+  str_attr = "something"
+  num_attr = 42
+  bool_attr = true
+}`)
+	te := &TokenEncoder{
+		Lines: source.MakeSourceLines("test.tf", bytes),
+		Tokens: []lang.SemanticToken{
+			{
+				Type: lang.TokenBlockType,
+				Range: hcl.Range{
+					Filename: "test.tf",
+					Start:    hcl.Pos{Line: 1, Column: 1, Byte: 0},
+					End:      hcl.Pos{Line: 1, Column: 8, Byte: 7},
+				},
+			},
+			{
+				Type: lang.TokenBlockLabel,
+				Range: hcl.Range{
+					Filename: "test.tf",
+					Start:    hcl.Pos{Line: 1, Column: 9, Byte: 8},
+					End:      hcl.Pos{Line: 1, Column: 17, Byte: 16},
+				},
+			},
+			{
+				Type: lang.TokenAttrName,
+				Range: hcl.Range{
+					Filename: "test.tf",
+					Start:    hcl.Pos{Line: 2, Column: 3, Byte: 21},
+					End:      hcl.Pos{Line: 2, Column: 11, Byte: 29},
+				},
+			},
+			{
+				Type: lang.TokenAttrName,
+				Range: hcl.Range{
+					Filename: "test.tf",
+					Start:    hcl.Pos{Line: 3, Column: 3, Byte: 46},
+					End:      hcl.Pos{Line: 3, Column: 11, Byte: 54},
+				},
+			},
+			{
+				Type: lang.TokenAttrName,
+				Range: hcl.Range{
+					Filename: "test.tf",
+					Start:    hcl.Pos{Line: 4, Column: 3, Byte: 62},
+					End:      hcl.Pos{Line: 4, Column: 12, Byte: 71},
+				},
+			},
+		},
+		ClientCaps: protocol.SemanticTokensClientCapabilities{
+			TokenTypes:     serverTokenTypes.AsStrings(),
+			TokenModifiers: serverTokenModifiers.AsStrings(),
+		},
+	}
+	data := te.Encode()
+	expectedData := []float64{
+		0, 0, 7, 0, 0,
+		0, 8, 8, 1, 0,
+		1, 2, 8, 2, 0,
+		1, 2, 8, 2, 0,
+		1, 2, 9, 2, 0,
+	}
+
+	if diff := cmp.Diff(expectedData, data); diff != "" {
+		t.Fatalf("unexpected encoded data.\nexpected: %#v\ngiven: %#v",
+			expectedData, data)
+	}
+}
+
+func TestTokenEncoder_multiLineTokens(t *testing.T) {
+	bytes := []byte(`myblock "mytype" {
+  str_attr = "something"
+  num_attr = 42
+  bool_attr = true
+}`)
+	te := &TokenEncoder{
+		Lines: source.MakeSourceLines("test.tf", bytes),
+		Tokens: []lang.SemanticToken{
+			{
+				Type: lang.TokenAttrName,
+				Range: hcl.Range{
+					Filename: "test.tf",
+					// Attribute name would actually never span
+					// multiple lines, but we don't have any token
+					// type that would *yet*
+					Start: hcl.Pos{Line: 2, Column: 3, Byte: 21},
+					End:   hcl.Pos{Line: 4, Column: 12, Byte: 71},
+				},
+			},
+		},
+		ClientCaps: protocol.SemanticTokensClientCapabilities{
+			TokenTypes:     serverTokenTypes.AsStrings(),
+			TokenModifiers: serverTokenModifiers.AsStrings(),
+		},
+	}
+	data := te.Encode()
+	expectedData := []float64{
+		1, 2, 24, 2, 0,
+		1, 0, 15, 2, 0,
+		1, 0, 11, 2, 0,
+	}
+
+	if diff := cmp.Diff(expectedData, data); diff != "" {
+		t.Fatalf("unexpected encoded data.\nexpected: %#v\ngiven: %#v",
+			expectedData, data)
+	}
+}
+
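+// The following test verifies modifier encoding: with the legend
+// [deprecated, modification], "deprecated" sets bit 0 (mask 1), hcl-lang's
+// "dependent" maps to "modification" and sets bit 1 (mask 2), and a token
+// carrying both encodes as mask 3.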
= "something" + num_attr = 42 + bool_attr = true +}`) + te := &TokenEncoder{ + Lines: source.MakeSourceLines("test.tf", bytes), + Tokens: []lang.SemanticToken{ + { + Type: lang.TokenBlockType, + Range: hcl.Range{ + Filename: "test.tf", + Start: hcl.Pos{Line: 1, Column: 1, Byte: 0}, + End: hcl.Pos{Line: 1, Column: 8, Byte: 7}, + }, + }, + { + Type: lang.TokenBlockLabel, + Modifiers: []lang.SemanticTokenModifier{ + lang.TokenModifierDeprecated, + }, + Range: hcl.Range{ + Filename: "test.tf", + Start: hcl.Pos{Line: 1, Column: 9, Byte: 8}, + End: hcl.Pos{Line: 1, Column: 8, Byte: 16}, + }, + }, + { + Type: lang.TokenAttrName, + Modifiers: []lang.SemanticTokenModifier{ + lang.TokenModifierDeprecated, + }, + Range: hcl.Range{ + Filename: "test.tf", + Start: hcl.Pos{Line: 2, Column: 3, Byte: 21}, + End: hcl.Pos{Line: 2, Column: 11, Byte: 29}, + }, + }, + { + Type: lang.TokenAttrName, + Modifiers: []lang.SemanticTokenModifier{ + lang.TokenModifierDependent, + }, + Range: hcl.Range{ + Filename: "test.tf", + Start: hcl.Pos{Line: 3, Column: 3, Byte: 46}, + End: hcl.Pos{Line: 3, Column: 11, Byte: 54}, + }, + }, + { + Type: lang.TokenAttrName, + Modifiers: []lang.SemanticTokenModifier{ + lang.TokenModifierDeprecated, + lang.TokenModifierDependent, + }, + Range: hcl.Range{ + Filename: "test.tf", + Start: hcl.Pos{Line: 4, Column: 3, Byte: 62}, + End: hcl.Pos{Line: 4, Column: 12, Byte: 71}, + }, + }, + }, + ClientCaps: protocol.SemanticTokensClientCapabilities{ + TokenTypes: serverTokenTypes.AsStrings(), + TokenModifiers: serverTokenModifiers.AsStrings(), + }, + } + data := te.Encode() + expectedData := []float64{ + 0, 0, 7, 0, 0, + 0, 8, 8, 1, 1, + 1, 2, 8, 2, 1, + 1, 2, 8, 2, 2, + 1, 2, 9, 2, 3, + } + + if diff := cmp.Diff(expectedData, data); diff != "" { + t.Fatalf("unexpected encoded data.\nexpected: %#v\ngiven: %#v", + expectedData, data) + } +} + +// func TestTokenEncoder_unsupported(t *testing.T) { +// t.Fatal("TODO") +// } diff --git a/internal/lsp/token_types.go b/internal/lsp/token_types.go new file mode 100644 index 000000000..6897d5376 --- /dev/null +++ b/internal/lsp/token_types.go @@ -0,0 +1,148 @@ +package lsp + +import ( + "math" +) + +type TokenType string +type TokenTypes []TokenType + +func (tt TokenTypes) AsStrings() []string { + types := make([]string, len(tt)) + + for i, tokenType := range tt { + types[i] = string(tokenType) + } + + return types +} + +func (tt TokenTypes) Index(tokenType TokenType) int { + for i, t := range tt { + if t == tokenType { + return i + } + } + return -1 +} + +type TokenModifier string +type TokenModifiers []TokenModifier + +func (tm TokenModifiers) AsStrings() []string { + modifiers := make([]string, len(tm)) + + for i, tokenModifier := range tm { + modifiers[i] = string(tokenModifier) + } + + return modifiers +} + +func (tm TokenModifiers) BitMask(declaredModifiers TokenModifiers) int { + bitMask := 0b0 + + for i, modifier := range tm { + if isDeclared(modifier, declaredModifiers) { + bitMask |= int(math.Pow(2, float64(i))) + } + } + + return bitMask +} + +func isDeclared(mod TokenModifier, declaredModifiers TokenModifiers) bool { + for _, dm := range declaredModifiers { + if mod == dm { + return true + } + } + return false +} + +const ( + // Types predefined in LSP spec + TokenTypeClass TokenType = "class" + TokenTypeComment TokenType = "comment" + TokenTypeEnum TokenType = "enum" + TokenTypeEnumMember TokenType = "enumMember" + TokenTypeEvent TokenType = "event" + TokenTypeFunction TokenType = "function" + TokenTypeInterface TokenType = "interface" + 
TokenTypeKeyword TokenType = "keyword" + TokenTypeMacro TokenType = "macro" + TokenTypeMethod TokenType = "method" + TokenTypeModifier TokenType = "modifier" + TokenTypeNamespace TokenType = "namespace" + TokenTypeNumber TokenType = "number" + TokenTypeOperator TokenType = "operator" + TokenTypeParameter TokenType = "parameter" + TokenTypeProperty TokenType = "property" + TokenTypeRegexp TokenType = "regexp" + TokenTypeString TokenType = "string" + TokenTypeStruct TokenType = "struct" + TokenTypeType TokenType = "type" + TokenTypeTypeParameter TokenType = "typeParameter" + TokenTypeVariable TokenType = "variable" + + // Modifiers predefined in LSP spec + TokenModifierDeclaration TokenModifier = "declaration" + TokenModifierDefinition TokenModifier = "definition" + TokenModifierReadonly TokenModifier = "readonly" + TokenModifierStatic TokenModifier = "static" + TokenModifierDeprecated TokenModifier = "deprecated" + TokenModifierAbstract TokenModifier = "abstract" + TokenModifierAsync TokenModifier = "async" + TokenModifierModification TokenModifier = "modification" + TokenModifierDocumentation TokenModifier = "documentation" + TokenModifierDefaultLibrary TokenModifier = "defaultLibrary" +) + +// Registering types which are actually in use and known +// to be registered by VS Code by default, see https://git.io/JIeuV +var ( + serverTokenTypes = TokenTypes{ + TokenTypeType, + TokenTypeString, + TokenTypeProperty, + } + serverTokenModifiers = TokenModifiers{ + TokenModifierDeprecated, + TokenModifierModification, + } +) + +func TokenTypesLegend(clientSupported []string) TokenTypes { + legend := make(TokenTypes, 0) + + // Filter only supported token types + for _, tokenType := range serverTokenTypes { + if sliceContains(clientSupported, string(tokenType)) { + legend = append(legend, TokenType(tokenType)) + } + } + + return legend +} + +func TokenModifiersLegend(clientSupported []string) TokenModifiers { + legend := make(TokenModifiers, 0) + + // Filter only supported token modifiers + for _, modifier := range serverTokenModifiers { + if sliceContains(clientSupported, string(modifier)) { + legend = append(legend, TokenModifier(modifier)) + } + } + + return legend +} + +func sliceContains(slice []string, value string) bool { + for _, val := range slice { + if val == value { + return true + } + } + return false +}